m_blackClip(NULL),
m_winid(winid)
{
- /*if (rendererName == "project") m_monitorId = 10000;
- else m_monitorId = 10001;*/
- /*m_osdTimer = new QTimer(this);
- connect(m_osdTimer, SIGNAL(timeout()), this, SLOT(slotOsdTimeout()));*/
if (profile.isEmpty()) profile = KdenliveSettings::current_profile();
buildConsumer(profile);
}
}
-
- filePropertyMap["fps"] = producer->get("source_fps");
+ if (producer->get_double("meta.media.frame_rate_den") > 0) {
+ filePropertyMap["fps"] = QString::number(producer->get_double("meta.media.frame_rate_num") / producer->get_double("meta.media.frame_rate_den"));
+ } else filePropertyMap["fps"] = producer->get("source_fps");
if (frame && frame->is_valid()) {
filePropertyMap["frame_size"] = QString::number(frame->get_int("width")) + 'x' + QString::number(frame->get_int("height"));
if (producer->get(property))
filePropertyMap["videocodec"] = producer->get(property);
}
+ QString query;
+ query = QString("meta.media.%1.codec.pix_fmt").arg(default_video);
+ filePropertyMap["pix_fmt"] = producer->get(query.toUtf8().constData());
if (KdenliveSettings::dropbframes()) {
kDebug() << "// LOOKING FOR H264 on: " << default_video;
if (m_isLoopMode) play(m_loopStart);
else if (m_isZoneMode) resetZoneMode();
emit rendererStopped((int) pos);
- //if (qApp->activeWindow()) QApplication::postEvent(qApp->activeWindow(), new PositionChangeEvent(GenTime((int) pos, m_fps), m_monitorId + 100));
- //new QCustomEvent(10002));
}
}
void Render::showAudio(Mlt::Frame& frame)
{
    // Compute a coarse per-channel audio level for the monitor overlay and
    // publish it through showAudioSignal(). One byte per channel is emitted:
    // the mean of up to 200 rescaled |s16| samples of that channel.
    if (!frame.is_valid() || frame.get_int("test_audio") != 0) return;

    mlt_audio_format audio_format = mlt_audio_s16;
    int freq = 0;
    int num_channels = 0;
    int samples = 0;
    // MLT fills freq/num_channels/samples; data is interleaved signed 16-bit PCM.
    int16_t* data = (int16_t*)frame.get_audio(audio_format, freq, num_channels, samples);
    if (!data)
        return;

    // Cap the averaging window at 200 samples per channel — plenty for a meter.
    const int num_samples = samples > 200 ? 200 : samples;

    // Guard against division by zero below: a frame can legitimately carry no
    // samples (or no channels). Emit the empty payload so listeners still get
    // a signal, matching the samples == 0 branch of the original code.
    if (num_samples <= 0 || num_channels <= 0) {
        emit showAudioSignal(QByteArray());
        return;
    }

    QByteArray channels;
    for (int i = 0; i < num_channels; i++) {
        long val = 0;
        for (int s = 0; s < num_samples; s++) {
            // /128 rescales |s16| (max 32767) into ~0..255, the meter's byte range.
            val += abs(data[i + s * num_channels] / 128);
        }
        channels.append(static_cast<char>(val / num_samples));
    }
    emit showAudioSignal(channels);
}
/*
int newDuration = (int) clipDuration.frames(m_fps) - 1;
int diff = newDuration - (trackPlaylist.clip_length(clipIndex) - 1);
- int currentOut;
- if (info.cropStart < GenTime())
- currentOut = newDuration - info.cropStart.frames(m_fps);
- else
- currentOut = newDuration + previousStart;
+ int currentOut = newDuration + previousStart;
if (currentOut > clip->get_length()) {
clip->parent().set("length", currentOut + 1);
clip->parent().set("out", currentOut);
m_isBlocked = true;
previousStart += moveFrame;
- if (previousStart < 0) {
- // special case, in point becoming negative (resizing images)
- previousOut -= previousStart;
- previousStart = 0;
- }
int length = previousOut + 1;
if (length > clip->get_length()) {
if (! trackPlaylist.is_blank(blankIndex)) {
kDebug() << "WARNING, CLIP TO RESIZE IS NOT BLANK";
}
- if (blankLength + moveFrame == 0) trackPlaylist.remove(blankIndex);
- else trackPlaylist.resize_clip(blankIndex, 0, blankLength + moveFrame - 1);
+ if (blankLength + moveFrame == 0)
+ trackPlaylist.remove(blankIndex);
+ else
+ trackPlaylist.resize_clip(blankIndex, 0, blankLength + moveFrame - 1);
}
trackPlaylist.consolidate_blanks(0);
/*if (QString(clip->parent().get("transparency")).toInt() == 1) {