m_mltConsumer->connect(*m_mltProducer);
m_mltProducer->set_speed(0.0);
m_refreshTimer.setSingleShot(true);
- m_refreshTimer.setInterval(70);
+ m_refreshTimer.setInterval(100);
connect(&m_refreshTimer, SIGNAL(timeout()), this, SLOT(refresh()));
connect(this, SIGNAL(multiStreamFound(const QString &,QList<int>,QList<int>,stringMap)), this, SLOT(slotMultiStreamProducerFound(const QString &,QList<int>,QList<int>,stringMap)));
}
delete m_blackClip;
m_blackClip = NULL;
- //TODO: uncomment following line when everything is clean
- // uncommented Feb 2011 --Granjow
- if (m_mltProfile) delete m_mltProfile;
m_activeProfile = profileName;
- char *tmp = qstrdup(m_activeProfile.toUtf8().constData());
- setenv("MLT_PROFILE", tmp, 1);
- m_mltProfile = new Mlt::Profile(tmp);
+ if (m_mltProfile) {
+ Mlt::Profile tmpProfile(m_activeProfile.toUtf8().constData());
+ m_mltProfile->set_colorspace(tmpProfile.colorspace());
+ m_mltProfile->set_frame_rate(tmpProfile.frame_rate_num(), tmpProfile.frame_rate_den());
+ m_mltProfile->set_height(tmpProfile.height());
+ m_mltProfile->set_width(tmpProfile.width());
+ m_mltProfile->set_progressive(tmpProfile.progressive());
+ m_mltProfile->set_sample_aspect(tmpProfile.sample_aspect_num(), tmpProfile.sample_aspect_den());
+ m_mltProfile->get_profile()->display_aspect_num = tmpProfile.display_aspect_num();
+ m_mltProfile->get_profile()->display_aspect_den = tmpProfile.display_aspect_den();
+ }
+ else {
+ m_mltProfile = new Mlt::Profile(m_activeProfile.toUtf8().constData());
+ }
+ setenv("MLT_PROFILE", m_activeProfile.toUtf8().constData(), 1);
m_mltProfile->set_explicit(true);
- delete[] tmp;
m_blackClip = new Mlt::Producer(*m_mltProfile, "colour", "black");
m_blackClip->set("id", "black");
if (device >= 0) {
if (BMInterface::isSupportedProfile(device, profileProperties)) {
QString decklink = "decklink:" + QString::number(KdenliveSettings::blackmagic_output_device());
- tmp = qstrdup(decklink.toUtf8().constData());
- m_mltConsumer = new Mlt::Consumer(*m_mltProfile, tmp);
- delete[] tmp;
+ if (!m_mltConsumer) {
+ m_mltConsumer = new Mlt::Consumer(*m_mltProfile, decklink.toUtf8().constData());
+ m_showFrameEvent = m_mltConsumer->listen("consumer-frame-show", this, (mlt_listener) consumer_frame_show);
+ mlt_log_set_callback(kdenlive_callback);
+ }
if (m_mltConsumer->is_valid()) {
m_externalConsumer = true;
- m_showFrameEvent = m_mltConsumer->listen("consumer-frame-show", this, (mlt_listener) consumer_frame_show);
m_mltConsumer->set("terminate_on_pause", 0);
m_mltConsumer->set("deinterlace_method", "onefield");
m_mltConsumer->set("real_time", KdenliveSettings::mltthreads());
- mlt_log_set_callback(kdenlive_callback);
}
if (m_mltConsumer && m_mltConsumer->is_valid()) return;
} else KMessageBox::informationList(qApp->activeWindow(), i18n("Your project's profile %1 is not compatible with the blackmagic output card. Please see supported profiles below. Switching to normal video display.", m_mltProfile->description()), BMInterface::supportedModes(KdenliveSettings::blackmagic_output_device()));
//m_mltConsumer->set("fullscreen", 1);
if (m_winid == 0) {
// OpenGL monitor
- m_mltConsumer = new Mlt::Consumer(*m_mltProfile, "sdl_audio");
+ if (!m_mltConsumer) {
+ m_mltConsumer = new Mlt::Consumer(*m_mltProfile, "sdl_audio");
+ m_showFrameEvent = m_mltConsumer->listen("consumer-frame-show", this, (mlt_listener) consumer_gl_frame_show);
+ }
m_mltConsumer->set("preview_off", 1);
m_mltConsumer->set("preview_format", mlt_image_rgb24a);
- m_showFrameEvent = m_mltConsumer->listen("consumer-frame-show", this, (mlt_listener) consumer_gl_frame_show);
} else {
- m_mltConsumer = new Mlt::Consumer(*m_mltProfile, "sdl_preview");
- m_showFrameEvent = m_mltConsumer->listen("consumer-frame-show", this, (mlt_listener) consumer_frame_show);
- m_pauseEvent = m_mltConsumer->listen("consumer-sdl-paused", this, (mlt_listener) consumer_paused);
- m_mltConsumer->set("window_id", m_winid);
+ if (!m_mltConsumer) {
+ m_mltConsumer = new Mlt::Consumer(*m_mltProfile, "sdl_preview");
+ m_showFrameEvent = m_mltConsumer->listen("consumer-frame-show", this, (mlt_listener) consumer_frame_show);
+ m_pauseEvent = m_mltConsumer->listen("consumer-sdl-paused", this, (mlt_listener) consumer_paused);
+ }
+ m_mltConsumer->set("window_id", m_winid);
}
m_mltConsumer->set("resize", 1);
m_mltConsumer->set("terminate_on_pause", 1);
if (m_isSplitView) slotSplitView(false);
if (!m_mltConsumer->is_stopped()) m_mltConsumer->stop();
m_mltConsumer->purge();
- if (m_showFrameEvent) delete m_showFrameEvent;
- m_showFrameEvent = NULL;
- if (m_pauseEvent) delete m_pauseEvent;
- m_pauseEvent = NULL;
- delete m_mltConsumer;
- m_mltConsumer = NULL;
}
QString scene;
if (!dropSceneList) scene = sceneList();
transition->set("mlt_service", "composite");
transition->set("a_track", 0);
transition->set("b_track", i);
- transition->set("distort", 1);
+ transition->set("distort", 0);
+ transition->set("aligned", 0);
transition->set("internal_added", "200");
QString geometry;
switch (screen) {
KUrl url(path);
Mlt::Producer *producer = NULL;
CLIPTYPE type = (CLIPTYPE)info.xml.attribute("type").toInt();
-
if (type == COLOR) {
producer = new Mlt::Producer(*m_mltProfile, 0, ("colour:" + info.xml.attribute("colour")).toUtf8().constData());
} else if (type == TEXT) {
if (producer && producer->is_valid() && info.xml.hasAttribute("xmldata"))
producer->set("xmldata", info.xml.attribute("xmldata").toUtf8().constData());
} else if (url.isEmpty()) {
+ //WARNING: when is this case used? Not sure it is working. --JBM
QDomDocument doc;
QDomElement mlt = doc.createElement("mlt");
QDomElement play = doc.createElement("playlist");
+ play.setAttribute("id", "playlist0");
doc.appendChild(mlt);
mlt.appendChild(play);
play.appendChild(doc.importNode(info.xml, true));
+ QDomElement tractor = doc.createElement("tractor");
+ tractor.setAttribute("id", "tractor0");
+ QDomElement track = doc.createElement("track");
+ track.setAttribute("producer", "playlist0");
+ tractor.appendChild(track);
+ mlt.appendChild(tractor);
producer = new Mlt::Producer(*m_mltProfile, "xml-string", doc.toString().toUtf8().constData());
} else {
- producer = new Mlt::Producer(*m_mltProfile, path.toUtf8().constData());
+ producer = new Mlt::Producer(*m_mltProfile, path.toUtf8().constData());
}
if (producer == NULL || producer->is_blank() || !producer->is_valid()) {
if (type == COLOR || type == TEXT || type == IMAGE || type == SLIDESHOW) {
int length;
if (info.xml.hasAttribute("length")) {
- if (clipOut > 0) duration = clipOut + 1;
length = info.xml.attribute("length").toInt();
clipOut = length - 1;
}
else length = info.xml.attribute("out").toInt() - info.xml.attribute("in").toInt();
producer->set("length", length);
+ duration = length;
}
if (clipOut > 0) producer->set_in_and_out(info.xml.attribute("in").toInt(), clipOut);
char property[200];
if (frameNumber > 0) producer->seek(frameNumber);
-
duration = duration > 0 ? duration : producer->get_playtime();
filePropertyMap["duration"] = QString::number(duration);
//kDebug() << "/////// PRODUCER: " << url.path() << " IS: " << producer->get_playtime();
}
}
- // Get frame rate
- int vindex = producer->get_int("video_index");
-
- // List streams
- int streams = producer->get_int("meta.media.nb_streams");
- QList <int> audio_list;
- QList <int> video_list;
- for (int i = 0; i < streams; i++) {
- QByteArray propertyName = QString("meta.media.%1.stream.type").arg(i).toLocal8Bit();
- QString type = producer->get(propertyName.data());
- if (type == "audio") audio_list.append(i);
- else if (type == "video") video_list.append(i);
+ int vindex = -1;
+ const QString mltService = producer->get("mlt_service");
+ if (mltService == "xml" || mltService == "consumer") {
+ // MLT playlist, create producer with blank profile to get real profile info
+ // TODO: is there an easier way to get this info (original source clip profile) from MLT?
+ Mlt::Profile *original_profile = new Mlt::Profile();
+ Mlt::Producer *tmpProd = new Mlt::Producer(*original_profile, path.toUtf8().constData());
+ filePropertyMap["progressive"] = QString::number(original_profile->progressive());
+ filePropertyMap["colorspace"] = QString::number(original_profile->colorspace());
+ filePropertyMap["fps"] = QString::number(original_profile->fps());
+ filePropertyMap["aspect_ratio"] = QString::number(original_profile->sar());
+ delete tmpProd;
+ delete original_profile;
}
+ else if (mltService == "avformat") {
+ // Get frame rate
+ vindex = producer->get_int("video_index");
+
+ // List streams
+ int streams = producer->get_int("meta.media.nb_streams");
+ QList <int> audio_list;
+ QList <int> video_list;
+ for (int i = 0; i < streams; i++) {
+ QByteArray propertyName = QString("meta.media.%1.stream.type").arg(i).toLocal8Bit();
+ QString type = producer->get(propertyName.data());
+ if (type == "audio") audio_list.append(i);
+ else if (type == "video") video_list.append(i);
+ }
- if (!info.xml.hasAttribute("video_index") && video_list.count() > 1) {
- // Clip has more than one video stream, ask which one should be used
- QMap <QString, QString> data;
- if (info.xml.hasAttribute("group")) data.insert("group", info.xml.attribute("group"));
- if (info.xml.hasAttribute("groupId")) data.insert("groupId", info.xml.attribute("groupId"));
- emit multiStreamFound(path, audio_list, video_list, data);
- // Force video index so that when reloading the clip we don't ask again for other streams
- filePropertyMap["video_index"] = QString::number(vindex);
- }
+ if (!info.xml.hasAttribute("video_index") && video_list.count() > 1) {
+ // Clip has more than one video stream, ask which one should be used
+ QMap <QString, QString> data;
+ if (info.xml.hasAttribute("group")) data.insert("group", info.xml.attribute("group"));
+ if (info.xml.hasAttribute("groupId")) data.insert("groupId", info.xml.attribute("groupId"));
+ emit multiStreamFound(path, audio_list, video_list, data);
+ // Force video index so that when reloading the clip we don't ask again for other streams
+ filePropertyMap["video_index"] = QString::number(vindex);
+ }
- if (vindex > -1) {
- snprintf(property, sizeof(property), "meta.media.%d.stream.frame_rate", vindex);
- if (producer->get(property))
- filePropertyMap["fps"] = producer->get(property);
- }
+ if (vindex > -1) {
+ snprintf(property, sizeof(property), "meta.media.%d.stream.frame_rate", vindex);
+ if (producer->get(property))
+ filePropertyMap["fps"] = producer->get(property);
+ }
- if (!filePropertyMap.contains("fps")) {
- if (producer->get_double("meta.media.frame_rate_den") > 0) {
- filePropertyMap["fps"] = locale.toString(producer->get_double("meta.media.frame_rate_num") / producer->get_double("meta.media.frame_rate_den"));
- } else filePropertyMap["fps"] = producer->get("source_fps");
- }
+ if (!filePropertyMap.contains("fps")) {
+ if (producer->get_double("meta.media.frame_rate_den") > 0) {
+ filePropertyMap["fps"] = locale.toString(producer->get_double("meta.media.frame_rate_num") / producer->get_double("meta.media.frame_rate_den"));
+ } else filePropertyMap["fps"] = producer->get("source_fps");
+ }
+ }
Mlt::Frame *frame = producer->get_frame();
if (frame && frame->is_valid()) {
filePropertyMap["frame_size"] = QString::number(frame->get_int("width")) + 'x' + QString::number(frame->get_int("height"));
filePropertyMap["frequency"] = QString::number(frame->get_int("frequency"));
filePropertyMap["channels"] = QString::number(frame->get_int("channels"));
- filePropertyMap["aspect_ratio"] = frame->get("aspect_ratio");
+ if (!filePropertyMap.contains("aspect_ratio")) filePropertyMap["aspect_ratio"] = frame->get("aspect_ratio");
if (frame->get_int("test_image") == 0) {
- if (url.path().endsWith(".mlt") || url.path().endsWith(".westley") || url.path().endsWith(".kdenlive")) {
+ if (mltService == "xml" || mltService == "consumer") {
filePropertyMap["type"] = "playlist";
metadataPropertyMap["comment"] = QString::fromUtf8(producer->get("title"));
} else if (frame->get_int("test_audio") == 0)
// Retrieve audio / video codec name
// If there is a
- if (vindex > -1) {
- /*if (context->duration == AV_NOPTS_VALUE) {
- kDebug() << " / / / / / / / /ERROR / / / CLIP HAS UNKNOWN DURATION";
- emit removeInvalidClip(clipId);
- delete producer;
- return;
- }*/
- // Get the video_index
- int video_max = 0;
- int default_audio = producer->get_int("audio_index");
- int audio_max = 0;
-
- int scan = producer->get_int("meta.media.progressive");
- filePropertyMap["progressive"] = QString::number(scan);
-
- // Find maximum stream index values
- for (int ix = 0; ix < producer->get_int("meta.media.nb_streams"); ix++) {
- snprintf(property, sizeof(property), "meta.media.%d.stream.type", ix);
- QString type = producer->get(property);
- if (type == "video")
- video_max = ix;
- else if (type == "audio")
- audio_max = ix;
- }
- filePropertyMap["default_video"] = QString::number(vindex);
- filePropertyMap["video_max"] = QString::number(video_max);
- filePropertyMap["default_audio"] = QString::number(default_audio);
- filePropertyMap["audio_max"] = QString::number(audio_max);
-
- snprintf(property, sizeof(property), "meta.media.%d.codec.long_name", vindex);
- if (producer->get(property)) {
- filePropertyMap["videocodec"] = producer->get(property);
- } else {
- snprintf(property, sizeof(property), "meta.media.%d.codec.name", vindex);
- if (producer->get(property))
- filePropertyMap["videocodec"] = producer->get(property);
- }
- QString query;
- query = QString("meta.media.%1.codec.pix_fmt").arg(vindex);
- filePropertyMap["pix_fmt"] = producer->get(query.toUtf8().constData());
- filePropertyMap["colorspace"] = producer->get("meta.media.colorspace");
-
- } else kDebug() << " / / / / /WARNING, VIDEO CONTEXT IS NULL!!!!!!!!!!!!!!";
- if (producer->get_int("audio_index") > -1) {
- // Get the audio_index
- int index = producer->get_int("audio_index");
- snprintf(property, sizeof(property), "meta.media.%d.codec.long_name", index);
- if (producer->get(property)) {
- filePropertyMap["audiocodec"] = producer->get(property);
- } else {
- snprintf(property, sizeof(property), "meta.media.%d.codec.name", index);
- if (producer->get(property))
- filePropertyMap["audiocodec"] = producer->get(property);
- }
- }
+ if (mltService == "avformat") {
+ if (vindex > -1) {
+ /*if (context->duration == AV_NOPTS_VALUE) {
+ kDebug() << " / / / / / / / /ERROR / / / CLIP HAS UNKNOWN DURATION";
+ emit removeInvalidClip(clipId);
+ delete producer;
+ return;
+ }*/
+ // Get the video_index
+ int video_max = 0;
+ int default_audio = producer->get_int("audio_index");
+ int audio_max = 0;
+
+ int scan = producer->get_int("meta.media.progressive");
+ filePropertyMap["progressive"] = QString::number(scan);
+
+ // Find maximum stream index values
+ for (int ix = 0; ix < producer->get_int("meta.media.nb_streams"); ix++) {
+ snprintf(property, sizeof(property), "meta.media.%d.stream.type", ix);
+ QString type = producer->get(property);
+ if (type == "video")
+ video_max = ix;
+ else if (type == "audio")
+ audio_max = ix;
+ }
+ filePropertyMap["default_video"] = QString::number(vindex);
+ filePropertyMap["video_max"] = QString::number(video_max);
+ filePropertyMap["default_audio"] = QString::number(default_audio);
+ filePropertyMap["audio_max"] = QString::number(audio_max);
+
+ snprintf(property, sizeof(property), "meta.media.%d.codec.long_name", vindex);
+ if (producer->get(property)) {
+ filePropertyMap["videocodec"] = producer->get(property);
+ } else {
+ snprintf(property, sizeof(property), "meta.media.%d.codec.name", vindex);
+ if (producer->get(property))
+ filePropertyMap["videocodec"] = producer->get(property);
+ }
+ QString query;
+ query = QString("meta.media.%1.codec.pix_fmt").arg(vindex);
+ filePropertyMap["pix_fmt"] = producer->get(query.toUtf8().constData());
+ filePropertyMap["colorspace"] = producer->get("meta.media.colorspace");
+
+ } else kDebug() << " / / / / /WARNING, VIDEO CONTEXT IS NULL!!!!!!!!!!!!!!";
+ if (producer->get_int("audio_index") > -1) {
+ // Get the audio_index
+ int index = producer->get_int("audio_index");
+ snprintf(property, sizeof(property), "meta.media.%d.codec.long_name", index);
+ if (producer->get(property)) {
+ filePropertyMap["audiocodec"] = producer->get(property);
+ } else {
+ snprintf(property, sizeof(property), "meta.media.%d.codec.name", index);
+ if (producer->get(property))
+ filePropertyMap["audiocodec"] = producer->get(property);
+ }
+ }
+ }
// metadata
Mlt::Properties metadata;
emit stopped();
if (position == -1 && producer->get("id") == currentId) position = consumerPosition;
if (position != -1) producer->seek(position);
+ m_fps = producer->get_fps();
int volume = KdenliveSettings::volume();
+ if (producer->get_int("_audioclip") == 1) {
+ // This is an audio only clip, create fake multitrack to apply audiowave filter
+ Mlt::Tractor *tractor = new Mlt::Tractor();
+ Mlt::Producer *color= new Mlt::Producer(*m_mltProfile, "color:red");
+ color->set_in_and_out(0, producer->get_out());
+ tractor->set_track(*producer, 0);
+ tractor->set_track(*color, 1);
+
+ Mlt::Consumer xmlConsumer(*m_mltProfile, "xml:audio_hack");
+ if (!xmlConsumer.is_valid()) return -1;
+ xmlConsumer.set("terminate_on_pause", 1);
+ xmlConsumer.connect(tractor->parent());
+ xmlConsumer.run();
+ delete tractor;
+ delete color;
+ delete producer;
+ QString playlist = QString::fromUtf8(xmlConsumer.get("audio_hack"));
+
+ Mlt::Producer *result = new Mlt::Producer(*m_mltProfile, "xml-string", playlist.toUtf8().constData());
+ Mlt::Filter *filter = new Mlt::Filter(*m_mltProfile, "audiowave");
+ result->attach(*filter);
+ tractor = new Mlt::Tractor();
+ tractor->set_track(*result, 0);
+ delete result;
+ delete filter;
+ producer = &(tractor->parent());
+ m_mltConsumer->connect(*producer);
+ }
+
producer->set("meta.volume", (double)volume / 100);
- m_fps = producer->get_fps();
blockSignals(false);
m_mltConsumer->connect(*producer);
blockSignals(true);
m_locale = QLocale();
-
m_mltProducer = new Mlt::Producer(*m_mltProfile, "xml-string", playlist.toUtf8().constData());
if (!m_mltProducer || !m_mltProducer->is_valid()) {
kDebug() << " WARNING - - - - -INVALID PLAYLIST: " << playlist.toUtf8().constData();
return;
if (m_mltConsumer) {
if (m_mltConsumer->is_stopped()) m_mltConsumer->start();
- m_mltConsumer->purge();
+ //m_mltConsumer->purge();
m_mltConsumer->set("refresh", 1);
}
}
/**
 * Return the current playback position as a GenTime.
 *
 * The position is read from the consumer (m_mltConsumer) rather than the
 * producer — presumably so the value reflects the frame actually being
 * displayed rather than the frame most recently decoded (TODO confirm
 * against MLT consumer semantics). Returns a default GenTime() when no
 * consumer exists yet.
 */
GenTime Render::seekPosition() const
{
    if (m_mltConsumer) return GenTime((int) m_mltConsumer->position(), m_fps);
    return GenTime();
}
const uchar* image = frame.get_image(format, width, height);
QImage qimage(width, height, QImage::Format_ARGB32_Premultiplied);
memcpy(qimage.scanLine(0), image, width * height * 4);
-
- /*mlt_image_format format = mlt_image_rgb24;
- int width = 0;
- int height = 0;
- const uchar* image = frame.get_image(format, width, height);
- QImage qimage(width, height, QImage::Format_RGB888);
- memcpy(qimage.bits(), image, width * height * 3);*/
emit frameUpdated(qimage.rgbSwapped());
}
Mlt::Producer trackProducer(tt);
delete tt;
Mlt::Playlist trackPlaylist((mlt_playlist) trackProducer.get_service());
+ if (!trackPlaylist.is_valid()) continue;
int clipNb = trackPlaylist.count();
for (int i = 0; i < clipNb; i++) {
Mlt::Producer *c = trackPlaylist.get_clip(i);
Mlt::Producer trackProducer(tt);
delete tt;
Mlt::Playlist trackPlaylist((mlt_playlist) trackProducer.get_service());
+ if (!trackPlaylist.is_valid()) continue;
int clipNb = trackPlaylist.count();
for (int i = 0; i < clipNb; i++) {
Mlt::Producer *c = trackPlaylist.get_clip(i);
}
void Render::slotMultiStreamProducerFound(const QString path, QList<int> audio_list, QList<int> video_list, stringMap data)
-{
+{
+ if (KdenliveSettings::automultistreams()) {
+ for (int i = 1; i < video_list.count(); i++) {
+ int vindex = video_list.at(i);
+ int aindex = 0;
+ if (i <= audio_list.count() -1) {
+ aindex = audio_list.at(i);
+ }
+ data.insert("video_index", QString::number(vindex));
+ data.insert("audio_index", QString::number(aindex));
+ data.insert("bypassDuplicate", "1");
+ emit addClip(KUrl(path), data);
+ }
+ return;
+ }
+
int width = 60.0 * m_mltProfile->dar();
int swidth = 60.0 * m_mltProfile->width() / m_mltProfile->height();
if (width % 2 == 1) width++;
for (int j = 1; j < video_list.count(); j++) {
Mlt::Producer multiprod(* m_mltProfile, path.toUtf8().constData());
multiprod.set("video_index", video_list.at(j));
- kDebug()<<"// LOADING: "<<j<<" = "<<video_list.at(j);
QImage thumb = KThumb::getFrame(&multiprod, 0, swidth, width, 60);
QGroupBox *streamFrame = new QGroupBox(i18n("Video stream %1", video_list.at(j)), content);
streamFrame->setProperty("vindex", video_list.at(j));
groupList << streamFrame;
streamFrame->setCheckable(true);
- streamFrame->setChecked(false);
+ streamFrame->setChecked(true);
QVBoxLayout *vh = new QVBoxLayout( streamFrame );
QLabel *iconLabel = new QLabel(content);
iconLabel->setPixmap(QPixmap::fromImage(thumb));
cb->addItem(i18n("Audio stream %1", audio_list.at(k)), audio_list.at(k));
}
comboList << cb;
- cb->setCurrentIndex(j);
+ cb->setCurrentIndex(qMin(j, audio_list.count() - 1));
vh->addWidget(cb);
}
vbox->addWidget(streamFrame);