#include "transition.h"
#include "kdenlivesettings.h"
#include "kthumb.h"
+#include "profilesdialog.h"
#include <KDebug>
#include <KIcon>
ClipItem::ClipItem(DocClipBase *clip, ItemInfo info, double fps, double speed, bool generateThumbs) :
AbstractClipItem(info, QRectF(), fps),
m_clip(clip),
- m_resizeMode(NONE),
m_startFade(0),
m_endFade(0),
m_audioOnly(false),
setBrush(QColor(colour.left(7)));
} else if (m_clipType == IMAGE || m_clipType == TEXT) {
setBrush(QColor(141, 166, 215));
- m_startPix = KThumb::getImage(KUrl(clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
+ //m_startPix = KThumb::getImage(KUrl(clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
} else if (m_clipType == AUDIO) {
setBrush(QColor(141, 215, 166));
connect(clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
ClipItem *ClipItem::clone(ItemInfo info) const
{
ClipItem *duplicate = new ClipItem(m_clip, info, m_fps, m_speed);
- if (info.cropStart == m_cropStart) duplicate->slotSetStartThumb(m_startPix);
- if (info.cropStart + (info.endPos - info.startPos) == m_cropStart + m_cropDuration) duplicate->slotSetEndThumb(m_endPix);
- kDebug() << "// CLoning clip: " << (info.cropStart + (info.endPos - info.startPos)).frames(m_fps) << ", CURRENT end: " << (cropStart() + duration()).frames(m_fps);
+ if (m_clipType == IMAGE || m_clipType == TEXT) duplicate->slotSetStartThumb(m_startPix);
+ else {
+ if (info.cropStart == m_cropStart) duplicate->slotSetStartThumb(m_startPix);
+ if (info.cropStart + (info.endPos - info.startPos) == m_cropStart + m_cropDuration) duplicate->slotSetEndThumb(m_endPix);
+ }
+ //kDebug() << "// Cloning clip: " << (info.cropStart + (info.endPos - info.startPos)).frames(m_fps) << ", CURRENT end: " << (cropStart() + duration()).frames(m_fps);
duplicate->setEffectList(m_effectList.clone());
duplicate->setVideoOnly(m_videoOnly);
duplicate->setAudioOnly(m_audioOnly);
{
m_effectList = effectList;
m_effectNames = m_effectList.effectNames().join(" / ");
+ if (!m_effectList.isEmpty()) setSelectedEffect(0);
}
const EffectsList ClipItem::effectList()
for (int i = 0; i < params.count(); i++) {
QDomElement e = params.item(i).toElement();
kDebug() << "// init eff: " << e.attribute("name");
+
+ // Check if this effect has a variable parameter
+ if (e.attribute("default").startsWith('%')) {
+ double evaluatedValue = ProfilesDialog::getStringEval(projectScene()->profile(), e.attribute("default"));
+ e.setAttribute("default", evaluatedValue);
+ if (e.hasAttribute("value") && e.attribute("value").startsWith('%')) {
+ e.setAttribute("value", evaluatedValue);
+ }
+ }
+
if (!e.isNull() && e.attribute("type") == "keyframe") {
QString def = e.attribute("default");
// Effect has a keyframe type parameter, we need to set the values
if (e.attribute("keyframes").isEmpty()) {
- e.setAttribute("keyframes", QString::number(m_cropStart.frames(m_fps)) + ':' + def + ';' + QString::number((m_cropStart + m_cropDuration).frames(m_fps)) + ':' + def);
+ e.setAttribute("keyframes", QString::number(cropStart().frames(m_fps)) + ':' + def + ';' + QString::number((cropStart() + cropDuration()).frames(m_fps)) + ':' + def);
//kDebug() << "///// EFFECT KEYFRAMES INITED: " << e.attribute("keyframes");
break;
}
}
if (effect.attribute("tag") == "volume" || effect.attribute("tag") == "brightness") {
if (effect.attribute("id") == "fadeout" || effect.attribute("id") == "fade_to_black") {
- int end = (duration() + cropStart()).frames(m_fps);
+ int end = (cropDuration() + cropStart()).frames(m_fps);
int start = end;
if (effect.attribute("id") == "fadeout") {
if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
return effectAt(m_selectedEffect);
}
-void ClipItem::resetThumbs()
+void ClipItem::resetThumbs(bool clearExistingThumbs)
{
- m_startPix = QPixmap();
- m_endPix = QPixmap();
+ if (clearExistingThumbs) {
+ m_startPix = QPixmap();
+ m_endPix = QPixmap();
+ m_audioThumbCachePic.clear();
+ }
slotFetchThumbs();
- m_audioThumbCachePic.clear();
}
QString colour = m_clip->getProperty("colour");
colour = colour.replace(0, 2, "#");
setBrush(QColor(colour.left(7)));
- } else resetThumbs();
+ } else resetThumbs(checkDuration);
}
void ClipItem::slotFetchThumbs()
{
+ if (m_clipType == IMAGE || m_clipType == TEXT) {
+ if (m_startPix.isNull()) {
+ m_startPix = KThumb::getImage(KUrl(m_clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
+ update();
+ }
+ return;
+ }
+
if (m_endPix.isNull() && m_startPix.isNull()) {
m_startThumbRequested = true;
m_endThumbRequested = true;
QRectF mappedRect;
if (m_clipType == AV && !isAudioOnly()) {
QRectF re = br;
- re.setTop(re.y() + re.height() / 2);
mappedRect = painter->matrix().mapRect(re);
- //painter->fillRect(mappedRect, QBrush(QColor(200, 200, 200, 140)));
+ mappedRect.setTop(mappedRect.bottom() - re.height() / 2);
} else mappedRect = mapped;
int channels = baseClip()->getProperty("channels").toInt();
pen.setStyle(Qt::DotLine);
painter->setPen(pen);
for (; it != markers.end(); ++it) {
- pos = (*it).time() - cropStart();
+ pos = (*it).time() / m_speed - cropStart();
if (pos > GenTime()) {
- if (pos > duration()) break;
+ if (pos > cropDuration()) break;
QLineF l(br.x() + pos.frames(m_fps), br.y(), br.x() + pos.frames(m_fps), br.bottom());
QLineF l2 = painter->matrix().map(l);
//framepos = scale * pos.frames(m_fps);
if (m_timeLine && m_timeLine->state() == QTimeLine::Running) {
qreal value = m_timeLine->currentValue();
txtBounding.setWidth(txtBounding.width() * value);
- markerBrush.setColor(QColor(50 + 200 * (1.0 - value), 50, 50, 100 + 50 * value));
+ markerBrush.setColor(QColor(50 + 200 *(1.0 - value), 50, 50, 100 + 50 * value));
} else markerBrush.setColor(QColor(50, 50, 50, 150));
painter->setBrush(markerBrush);
painter->setPen(Qt::NoPen);
}*/
// draw effect or transition keyframes
- if (itemWidth > 20) drawKeyFrames(painter, exposed);
+ if (mapped.width() > 20) drawKeyFrames(painter, exposed);
painter->setMatrixEnabled(true);
else setToolTip(i18n("Audio fade duration: %1s", GenTime(m_endFade, m_fps).seconds()));
return FADEOUT;
} else if ((rect.right() - pos.x() < maximumOffset) && (rect.bottom() - pos.y() > addtransitionOffset)) {
- setToolTip(i18n("Clip duration: %1s", duration().seconds()));
+ setToolTip(i18n("Clip duration: %1s", cropDuration().seconds()));
return RESIZEEND;
} else if ((pos.x() - rect.x() < 16 / scale) && (rect.bottom() - pos.y() <= addtransitionOffset)) {
setToolTip(i18n("Add transition"));
GenTime pos;
for (int i = 0; i < markers.size(); i++) {
- pos = markers.at(i) - cropStart();
+ pos = markers.at(i) / m_speed - cropStart();
if (pos > GenTime()) {
- if (pos > duration()) break;
+ if (pos > cropDuration()) break;
else snaps.append(pos + startPos());
}
}
GenTime pos;
for (int i = 0; i < markers.size(); i++) {
- pos = markers.at(i).time() - cropStart();
+ pos = markers.at(i).time() / m_speed - cropStart();
if (pos > GenTime()) {
- if (pos > duration()) break;
+ if (pos > cropDuration()) break;
else snaps.append(CommentedTime(pos + startPos(), markers.at(i).comment()));
}
}
//kDebug() << "// PREP AUDIO THMB FRMO : scale:" << pixelForOneFrame<< ", from: " << startpixel << ", to: " << endpixel;
//if ( (!audioThumbWasDrawn || framePixelWidth!=pixelForOneFrame ) && !baseClip()->audioFrameChache.isEmpty()){
- for (int startCache = startpixel - startpixel % 100;startCache < endpixel;startCache += 100) {
+ for (int startCache = startpixel - startpixel % 100; startCache < endpixel; startCache += 100) {
//kDebug() << "creating " << startCache;
//if (framePixelWidth!=pixelForOneFrame ||
if (m_framePixelWidth == pixelForOneFrame && m_audioThumbCachePic.contains(startCache))
int channelHeight = m_audioThumbCachePic[startCache].height() / channels;
- for (int i = 0;i < channels;i++) {
+ for (int i = 0; i < channels; i++) {
positiveChannelPaths[i].moveTo(0, channelHeight*i + channelHeight / 2);
negativeChannelPaths[i].moveTo(0, channelHeight*i + channelHeight / 2);
}
- for (int samples = 0;samples <= 100;samples++) {
+ for (int samples = 0; samples <= 100; samples++) {
double frame = (double)(samples + startCache - 0) / pixelForOneFrame;
int sample = (int)((frame - (int)(frame)) * 20); // AUDIO_FRAME_SIZE
if (frame < 0 || sample < 0 || sample > 19)
continue;
QMap<int, QByteArray> frame_channel_data = baseClip()->m_audioFrameCache[(int)frame];
- for (int channel = 0;channel < channels && frame_channel_data[channel].size() > 0;channel++) {
+ for (int channel = 0; channel < channels && frame_channel_data[channel].size() > 0; channel++) {
int y = channelHeight * channel + channelHeight / 2;
int delta = (int)(frame_channel_data[channel][sample] - 127 / 2) * channelHeight / 64;
negativeChannelPaths[channel].lineTo(samples, 0.1 + y - delta);
}
}
- for (int channel = 0;channel < channels ;channel++)
+ for (int channel = 0; channel < channels ; channel++)
if (fullAreaDraw && samples == 100) {
positiveChannelPaths[channel].lineTo(samples, channelHeight*channel + channelHeight / 2);
negativeChannelPaths[channel].lineTo(samples, channelHeight*channel + channelHeight / 2);
pixpainter.setPen(QPen(QColor(0, 0, 0)));
pixpainter.setBrush(QBrush(QColor(60, 60, 60)));
- for (int i = 0;i < channels;i++) {
+ for (int i = 0; i < channels; i++) {
if (fullAreaDraw) {
//pixpainter.fillPath(positiveChannelPaths[i].united(negativeChannelPaths[i]),QBrush(Qt::SolidPattern));//or singleif looks better
pixpainter.drawPath(positiveChannelPaths[i].united(negativeChannelPaths[i]));//or singleif looks better
}
-// virtual
-void ClipItem::mousePressEvent(QGraphicsSceneMouseEvent * event)
-{
- /*m_resizeMode = operationMode(event->pos());
- if (m_resizeMode == MOVE) {
- m_maxTrack = scene()->sceneRect().height();
- m_grabPoint = (int) (event->pos().x() - rect().x());
- }*/
- QGraphicsRectItem::mousePressEvent(event);
-}
-
-// virtual
-void ClipItem::mouseReleaseEvent(QGraphicsSceneMouseEvent * event)
-{
- m_resizeMode = NONE;
- QGraphicsRectItem::mouseReleaseEvent(event);
-}
-
/*
//virtual
void ClipItem::hoverEnterEvent(QGraphicsSceneHoverEvent *e)
if (posx > max && maxDuration() != GenTime()) posx = max;
if (posx == endPos().frames(m_fps)) return;
//kDebug() << "// NEW POS: " << posx << ", OLD END: " << endPos().frames(m_fps);
- const int previous = (cropStart() + duration()).frames(m_fps);
+ const int previous = (cropStart() + cropDuration()).frames(m_fps);
AbstractClipItem::resizeEnd(posx, m_speed);
- if ((int)(cropStart() + duration()).frames(m_fps) != previous) {
- if (updateKeyFrames) checkEffectsKeyframesPos(previous, (cropStart() + duration()).frames(m_fps), false);
+ if ((int)(cropStart() + cropDuration()).frames(m_fps) != previous) {
+ if (updateKeyFrames) checkEffectsKeyframesPos(previous, (cropStart() + cropDuration()).frames(m_fps), false);
if (m_hasThumbs && KdenliveSettings::videothumbnails()) {
/*connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QPixmap)), this, SLOT(slotThumbReady(int, QPixmap)));*/
m_endThumbTimer->start(150);
kDebug() << "Invalid effect index: " << ix;
return;
}
- kDebug() << "CHange EFFECT AT: " << ix << ", CURR: " << m_effectList.at(ix).attribute("tag") << ", NEW: " << effect.attribute("tag");
+ //kDebug() << "CHange EFFECT AT: " << ix << ", CURR: " << m_effectList.at(ix).attribute("tag") << ", NEW: " << effect.attribute("tag");
effect.setAttribute("kdenlive_ix", ix + 1);
m_effectList.insert(ix, effect);
m_effectList.removeAt(ix + 1);
parameters.addParam("endtag", e.attribute("endtag", "end"));
}
- double f = e.attribute("factor", "1").toDouble();
-
- if (f == 1) {
+ if (e.attribute("factor", "1") == "1") {
parameters.addParam(e.attribute("name"), e.attribute("value"));
// check if it is a fade effect
}
}
} else {
- parameters.addParam(e.attribute("name"), QString::number(e.attribute("value").toDouble() / f));
+ double fact;
+ if (e.attribute("factor").startsWith('%')) {
+ fact = ProfilesDialog::getStringEval(projectScene()->profile(), e.attribute("factor"));
+ } else fact = e.attribute("factor", "1").toDouble();
+ parameters.addParam(e.attribute("name"), QString::number(e.attribute("value").toDouble() / fact));
}
}
}
QTextStream txtNeu(&neu);
if (values.size() > 0)
txtNeu << (int)values[0].toDouble();
- for (int i = 0;i < separators.size() && i + 1 < values.size();i++) {
+ for (int i = 0; i < separators.size() && i + 1 < values.size(); i++) {
txtNeu << separators[i];
txtNeu << (int)(values[i+1].toDouble());
}
parameters.addParam("start", neu);
} else {
if (e.attribute("factor", "1") != "1") {
- parameters.addParam(e.attribute("name"), QString::number(e.attribute("value").toDouble() / e.attribute("factor").toDouble()));
+ double fact;
+ if (e.attribute("factor").startsWith('%')) {
+ fact = ProfilesDialog::getStringEval(projectScene()->profile(), e.attribute("factor"));
+ } else fact = e.attribute("factor", "1").toDouble();
+ parameters.addParam(e.attribute("name"), QString::number(e.attribute("value").toDouble() / fact));
} else {
parameters.addParam(e.attribute("name"), e.attribute("value"));
}