static int FRAME_SIZE;
-ClipItem::ClipItem(DocClipBase *clip, ItemInfo info, double fps, double speed, int strobe, int frame_width, bool generateThumbs) :
- AbstractClipItem(info, QRectF(), fps),
- m_clip(clip),
- m_startFade(0),
- m_endFade(0),
- m_audioOnly(false),
- m_videoOnly(false),
- m_startPix(QPixmap()),
- m_endPix(QPixmap()),
- m_hasThumbs(false),
- m_selectedEffect(-1),
- m_timeLine(0),
- m_startThumbRequested(false),
- m_endThumbRequested(false),
- //m_hover(false),
- m_speed(speed),
- m_strobe(strobe),
- m_framePixelWidth(0),
- m_limitedKeyFrames(false)
+ClipItem::ClipItem(DocClipBase *clip, const ItemInfo& info, double fps, double speed, int strobe, int frame_width, bool generateThumbs) :
+ AbstractClipItem(info, QRectF(), fps),
+ m_clip(clip),
+ m_startFade(0),
+ m_endFade(0),
+ m_audioOnly(false),
+ m_videoOnly(false),
+ m_startPix(QPixmap()),
+ m_endPix(QPixmap()),
+ m_hasThumbs(false),
+ m_selectedEffect(-1),
+ m_timeLine(0),
+ m_startThumbRequested(false),
+ m_endThumbRequested(false),
+ //m_hover(false),
+ m_speed(speed),
+ m_strobe(strobe),
+ m_framePixelWidth(0),
+ m_limitedKeyFrames(false)
{
setZValue(2);
m_effectList = EffectsList(true);
setAcceptDrops(true);
m_audioThumbReady = m_clip->audioThumbCreated();
//setAcceptsHoverEvents(true);
- connect(this , SIGNAL(prepareAudioThumb(double, int, int, int)) , this, SLOT(slotPrepareAudioThumb(double, int, int, int)));
+ connect(this , SIGNAL(prepareAudioThumb(double,int,int,int,int)) , this, SLOT(slotPrepareAudioThumb(double,int,int,int,int)));
if (m_clipType == VIDEO || m_clipType == AV || m_clipType == SLIDESHOW || m_clipType == PLAYLIST) {
m_baseColor = QColor(141, 166, 215);
connect(&m_startThumbTimer, SIGNAL(timeout()), this, SLOT(slotGetStartThumb()));
m_endThumbTimer.setSingleShot(true);
connect(&m_endThumbTimer, SIGNAL(timeout()), this, SLOT(slotGetEndThumb()));
- connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QImage)), this, SLOT(slotThumbReady(int, QImage)));
+ connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int,QImage)), this, SLOT(slotThumbReady(int,QImage)));
connect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
if (generateThumbs) QTimer::singleShot(200, this, SLOT(slotFetchThumbs()));
}
} else if (m_clipType == IMAGE || m_clipType == TEXT) {
m_baseColor = QColor(141, 166, 215);
if (m_clipType == TEXT) {
- connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QImage)), this, SLOT(slotThumbReady(int, QImage)));
+ connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int,QImage)), this, SLOT(slotThumbReady(int,QImage)));
}
//m_startPix = KThumb::getImage(KUrl(clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
} else if (m_clipType == AUDIO) {
m_baseColor = QColor(141, 215, 166);
connect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
}
+ m_paintColor = m_baseColor;
}
blockSignals(true);
m_endThumbTimer.stop();
m_startThumbTimer.stop();
- if (scene()) scene()->removeItem(this);
+ if (scene())
+ scene()->removeItem(this);
if (m_clipType == VIDEO || m_clipType == AV || m_clipType == SLIDESHOW || m_clipType == PLAYLIST) {
- //disconnect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QImage)), this, SLOT(slotThumbReady(int, QImage)));
+ //disconnect(m_clip->thumbProducer(), SIGNAL(thumbReady(int,QImage)), this, SLOT(slotThumbReady(int,QImage)));
//disconnect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
}
delete m_timeLine;
}
// Create a duplicate of this clip at the timeline placement described by 'info'.
// The copy shares the same DocClipBase source and reuses speed/strobe/frame-size settings.
// Signature change in this hunk: take ItemInfo by const reference to avoid a copy.
-ClipItem *ClipItem::clone(ItemInfo info) const
+ClipItem *ClipItem::clone(const ItemInfo &info) const
{
    ClipItem *duplicate = new ClipItem(m_clip, info, m_fps, m_speed, m_strobe, FRAME_SIZE); // caller takes ownership of the new item
    if (m_clipType == IMAGE || m_clipType == TEXT) duplicate->slotSetStartThumb(m_startPix); // static clips: reuse the existing thumbnail instead of regenerating
    return duplicate;
}
-void ClipItem::setEffectList(const EffectsList effectList)
+void ClipItem::setEffectList(const EffectsList &effectList)
{
m_effectList.clone(effectList);
m_effectNames = m_effectList.effectNames().join(" / ");
if (!m_effectList.isEmpty()) {
- for (int i = 0; i < m_effectList.count(); i++) {
- QDomElement effect = m_effectList.at(i);
+ for (int i = 0; i < m_effectList.count(); ++i) {
+ QDomElement effect = m_effectList.at(i);
QString effectId = effect.attribute("id");
// check if it is a fade effect
QDomNodeList params = effect.elementsByTagName("parameter");
}
} else if (effectId == "fadeout") {
if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
- if (e.attribute("name") == "out") fade -= e.attribute("value").toInt();
- else if (e.attribute("name") == "in") fade += e.attribute("value").toInt();
+ if (e.attribute("name") == "out") fade += e.attribute("value").toInt();
+ else if (e.attribute("name") == "in") fade -= e.attribute("value").toInt();
} else {
QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fade_to_black");
- if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
- else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
+ if (fadeout.attribute("name") == "out") fade += fadeout.attribute("value").toInt();
+ else if (fadeout.attribute("name") == "in") fade -= fadeout.attribute("value").toInt();
}
} else if (effectId == "fade_to_black") {
if (m_effectList.hasEffect(QString(), "fadeout") == -1) {
- if (e.attribute("name") == "out") fade -= e.attribute("value").toInt();
- else if (e.attribute("name") == "in") fade += e.attribute("value").toInt();
+ if (e.attribute("name") == "out") fade += e.attribute("value").toInt();
+ else if (e.attribute("name") == "in") fade -= e.attribute("value").toInt();
} else {
QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fadeout");
- if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
- else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
+ if (fadeout.attribute("name") == "out") fade += fadeout.attribute("value").toInt();
+ else if (fadeout.attribute("name") == "in") fade -= fadeout.attribute("value").toInt();
}
}
}
else if (fade < 0)
m_endFade = -fade;
}
- setSelectedEffect(0);
+ setSelectedEffect(1);
}
}
return m_selectedEffect;
}
-void ClipItem::initEffect(QDomElement effect, int diff)
+void ClipItem::initEffect(QDomElement effect, int diff, int offset)
{
// the kdenlive_ix int is used to identify an effect in mlt's playlist, should
// not be changed
// Init parameter value & keyframes if required
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (e.isNull())
e.setAttribute("value", "1");
}
- if ((e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && e.attribute("keyframes").isEmpty()) {
- // Effect has a keyframe type parameter, we need to set the values
- e.setAttribute("keyframes", QString::number(cropStart().frames(m_fps)) + ':' + e.attribute("default"));
+ if (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") {
+ if (e.attribute("keyframes").isEmpty()) {
+ // Effect has a keyframe type parameter, we need to set the values
+ e.setAttribute("keyframes", QString::number((int) cropStart().frames(m_fps)) + ':' + e.attribute("default"));
+ }
+ else if (offset != 0) {
+ // adjust keyframes to this clip
+ QString adjusted = adjustKeyframes(e.attribute("keyframes"), offset - cropStart().frames(m_fps));
+ e.setAttribute("keyframes", adjusted);
+ }
+ }
+
+ if (e.attribute("type") == "geometry" && !e.hasAttribute("fixed")) {
+ // Effects with a geometry parameter need to sync in / out with parent clip
+ effect.setAttribute("in", QString::number((int) cropStart().frames(m_fps)));
+ effect.setAttribute("out", QString::number((int) (cropStart() + cropDuration()).frames(m_fps) - 1));
+ effect.setAttribute("_sync_in_out", "1");
}
}
if (effect.attribute("tag") == "volume" || effect.attribute("tag") == "brightness") {
int start = end;
if (effect.attribute("id") == "fadeout") {
if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
- int effectDuration = EffectsList::parameter(effect, "in").toInt();
+ int effectDuration = EffectsList::parameter(effect, "out").toInt() - EffectsList::parameter(effect, "in").toInt();
if (effectDuration > cropDuration().frames(m_fps)) {
effectDuration = cropDuration().frames(m_fps) / 2;
}
}
} else if (effect.attribute("id") == "fade_to_black") {
if (m_effectList.hasEffect(QString(), "fadeout") == -1) {
- int effectDuration = EffectsList::parameter(effect, "in").toInt();
+ int effectDuration = EffectsList::parameter(effect, "out").toInt() - EffectsList::parameter(effect, "in").toInt();
if (effectDuration > cropDuration().frames(m_fps)) {
effectDuration = cropDuration().frames(m_fps) / 2;
}
if (effect.attribute("id") == "fadein") {
if (m_effectList.hasEffect(QString(), "fade_from_black") == -1) {
int effectDuration = EffectsList::parameter(effect, "out").toInt();
+ if (offset != 0) effectDuration -= offset;
if (effectDuration > cropDuration().frames(m_fps)) {
effectDuration = cropDuration().frames(m_fps) / 2;
}
end += effectDuration;
} else
- end += EffectsList::parameter(m_effectList.getEffectByTag(QString(), "fade_from_black"), "out").toInt();
+ end += EffectsList::parameter(m_effectList.getEffectByTag(QString(), "fade_from_black"), "out").toInt() - offset;
} else if (effect.attribute("id") == "fade_from_black") {
if (m_effectList.hasEffect(QString(), "fadein") == -1) {
int effectDuration = EffectsList::parameter(effect, "out").toInt();
+ if (offset != 0) effectDuration -= offset;
if (effectDuration > cropDuration().frames(m_fps)) {
effectDuration = cropDuration().frames(m_fps) / 2;
}
end += effectDuration;
} else
- end += EffectsList::parameter(m_effectList.getEffectByTag(QString(), "fadein"), "out").toInt();
+ end += EffectsList::parameter(m_effectList.getEffectByTag(QString(), "fadein"), "out").toInt() - offset;
}
EffectsList::setParameter(effect, "in", QString::number(start));
EffectsList::setParameter(effect, "out", QString::number(end));
}
}
-bool ClipItem::checkKeyFrames()
// Shift every keyframe position in a "pos:value;pos:value;..." string by -offset,
// so keyframes copied from another clip line up with this clip's crop start.
// Values after the ':' are passed through unchanged.
+const QString ClipItem::adjustKeyframes(const QString &keyframes, int offset)
+{
+    QStringList result;
+    // Simple keyframes
+    const QStringList list = keyframes.split(QLatin1Char(';'), QString::SkipEmptyParts); // tolerate trailing ';'
+    foreach(const QString &keyframe, list) {
+        const int pos = keyframe.section(':', 0, 0).toInt() - offset; // frame position before the first ':'
+        const QString newKey = QString::number(pos) + ":" + keyframe.section(':', 1); // keep the value part verbatim
+        result.append(newKey);
+    }
+    return result.join(";");
+}
+
+bool ClipItem::checkKeyFrames(int width, int height, int previousDuration, int cutPos)
{
bool clipEffectsModified = false;
QLocale locale;
// go through all effects this clip has
for (int ix = 0; ix < m_effectList.count(); ++ix) {
+ // Check geometry params
+ resizeGeometries(ix, width, height, previousDuration, cutPos == -1 ? 0 : cutPos, cropDuration().frames(m_fps) - 1);
+
+ // Check keyframe params
QStringList keyframeParams = keyframes(ix);
QStringList newKeyFrameParams;
bool effModified = false;
int lastPos = -1;
double lastValue = -1;
int start = cropStart().frames(m_fps);
- int end = (cropStart() + cropDuration()).frames(m_fps);
+ int end = (cropStart() + cropDuration()).frames(m_fps) - 1;
// go through all keyframes for one param
foreach(const QString &str, keyframes) {
if (pos > start) {
int diff = pos - lastPos;
double ratio = (double)(start - lastPos) / diff;
- double newValue = lastValue + (val - lastValue) * ratio;
- newKeyFrames.append(QString::number(start) + ':' + locale.toString(newValue));
+ int newValue = lastValue + (val - lastValue) * ratio;
+ newKeyFrames.append(QString::number(start) + ':' + QString::number(newValue));
modified = true;
}
cutKeyFrame = false;
int diff = pos - lastPos;
if (diff != 0) {
double ratio = (double)(end - lastPos) / diff;
- double newValue = lastValue + (val - lastValue) * ratio;
- newKeyFrames.append(QString::number(end) + ':' + locale.toString(newValue));
+ int newValue = lastValue + (val - lastValue) * ratio;
+ newKeyFrames.append(QString::number(end) + ':' + QString::number(newValue));
modified = true;
}
break;
} else {
- newKeyFrames.append(QString::number(pos) + ':' + locale.toString(val));
+ newKeyFrames.append(QString::number(pos) + ':' + QString::number(val));
}
}
lastPos = pos;
return clipEffectsModified;
}
-void ClipItem::setKeyframes(const int ix, const QStringList keyframes)
+void ClipItem::setKeyframes(const int ix, const QStringList &keyframes)
{
QDomElement effect = m_effectList.at(ix);
if (effect.attribute("disable") == "1") return;
QLocale locale;
QDomNodeList params = effect.elementsByTagName("parameter");
int keyframeParams = 0;
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
- if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && e.attribute("intimeline") == "1") {
+ if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && (!e.hasAttribute("intimeline") || e.attribute("intimeline") == "1")) {
e.setAttribute("keyframes", keyframes.at(keyframeParams));
- if (ix == m_selectedEffect && keyframeParams == 0) {
+ if (ix + 1 == m_selectedEffect && keyframeParams == 0) {
m_keyframes.clear();
m_visibleParam = i;
double max = locale.toDouble(e.attribute("max"));
{
m_selectedEffect = ix;
QLocale locale;
- QDomElement effect = effectAt(m_selectedEffect);
+ QDomElement effect = effectAtIndex(m_selectedEffect);
if (!effect.isNull() && effect.attribute("disable") != "1") {
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
- if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && e.attribute("intimeline") == "1") {
+ if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && (!e.hasAttribute("intimeline") || e.attribute("intimeline") == "1")) {
m_keyframes.clear();
m_limitedKeyFrames = e.attribute("type") == "keyframe";
m_visibleParam = i;
}
}
// Re-serialise every "geometry"-typed parameter of the effect at 'index' so its
// animation spans [start, start + duration], using Mlt::Geometry to rescale from
// the previous clip duration. width/height give the profile frame size for parsing.
+void ClipItem::resizeGeometries(const int index, int width, int height, int previousDuration, int start, int duration)
+{
+    QString geom;
+    QDomElement effect = m_effectList.at(index);
+    QDomNodeList params = effect.elementsByTagName("parameter");
+
+    for (int i = 0; i < params.count(); ++i) {
+        QDomElement e = params.item(i).toElement();
+        if (!e.isNull() && e.attribute("type") == "geometry") { // only geometry params need rescaling
+            geom = e.attribute("value");
+            Mlt::Geometry geometry(geom.toUtf8().data(), previousDuration, width, height); // parse against the old duration
+            e.setAttribute("value", geometry.serialise(start, start + duration)); // write back clamped to the new range
+        }
+    }
+}
+
QStringList ClipItem::keyframes(const int index)
{
QStringList result;
QDomElement effect = m_effectList.at(index);
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe"))
result.append(e.attribute("keyframes"));
void ClipItem::updateKeyframeEffect()
{
// regenerate xml parameter from the clip keyframes
- QDomElement effect = getEffectAt(m_selectedEffect);
+ QDomElement effect = getEffectAtIndex(m_selectedEffect);
if (effect.attribute("disable") == "1") return;
QDomNodeList params = effect.elementsByTagName("parameter");
QDomElement e = params.item(m_visibleParam).toElement();
// Return the currently selected effect's XML element, or a null element when no
// effect is selected (m_selectedEffect == -1) or the clip has no effects.
// This hunk switches the accessor from effectAt() to effectAtIndex(); presumably
// the selected-effect index became 1-based in this change set — confirm against
// the matching setSelectedEffect()/effectAtIndex() definitions.
QDomElement ClipItem::selectedEffect()
{
-    if (m_selectedEffect == -1 || m_effectList.isEmpty()) return QDomElement();
-    return effectAt(m_selectedEffect);
+    if (m_selectedEffect == -1 || m_effectList.isEmpty())
+        return QDomElement();
+    return effectAtIndex(m_selectedEffect);
}
void ClipItem::resetThumbs(bool clearExistingThumbs)
}
// Slot: store a freshly rendered start thumbnail. Only ARGB32 images are accepted.
// Signature change in this hunk: pass the QImage by const reference (avoids an
// implicit copy on queued slot invocation arguments).
// NOTE(review): the diff's hunk context likely elides statements between the
// QPixmap conversion and the closing braces — confirm against the full source.
-void ClipItem::slotSetStartThumb(QImage img)
+void ClipItem::slotSetStartThumb(const QImage &img)
{
    if (!img.isNull() && img.format() == QImage::Format_ARGB32) {
        QPixmap pix = QPixmap::fromImage(img);
    }
}
// Slot: store a freshly rendered end thumbnail. Mirrors slotSetStartThumb(QImage);
// only ARGB32 images are accepted, and the parameter becomes a const reference.
// NOTE(review): hunk context likely elides statements inside the if-block —
// confirm against the full source before relying on this body.
-void ClipItem::slotSetEndThumb(QImage img)
+void ClipItem::slotSetEndThumb(const QImage &img)
{
    if (!img.isNull() && img.format() == QImage::Format_ARGB32) {
        QPixmap pix = QPixmap::fromImage(img);
    }
}
// Slot: a thumbnail for 'frame' arrived from the thumb producer. Bails out when
// the item is not attached to a scene. Parameter becomes a const QImage reference.
// NOTE(review): two closing braces follow a single visible opener, so the hunk
// clearly elides the body that consumes 'r' and 'img' — see full source.
-void ClipItem::slotThumbReady(int frame, QImage img)
+void ClipItem::slotThumbReady(int frame, const QImage &img)
{
    if (scene() == NULL) return; // item detached from scene: nothing to repaint
    QRectF r = boundingRect();
}
}
// Slot: directly set the cached start thumbnail pixmap.
// Signature change in this hunk: const value -> const reference (no copy).
-void ClipItem::slotSetStartThumb(const QPixmap pix)
+void ClipItem::slotSetStartThumb(const QPixmap &pix)
{
    m_startPix = pix;
}
// Slot: directly set the cached end thumbnail pixmap.
// Signature change in this hunk: const value -> const reference (no copy).
-void ClipItem::slotSetEndThumb(const QPixmap pix)
+void ClipItem::slotSetEndThumb(const QPixmap &pix)
{
    m_endPix = pix;
}
const QStyleOptionGraphicsItem *option,
QWidget *)
{
- QColor paintColor;
+ QPalette palette = scene()->palette();
+ QColor paintColor = m_paintColor;
+ QColor textColor;
+ QColor textBgColor;
QPen framePen;
- if (parentItem()) paintColor = QColor(255, 248, 149);
- else paintColor = m_baseColor;
if (isSelected() || (parentItem() && parentItem()->isSelected())) {
- paintColor = paintColor.darker();
- framePen.setColor(Qt::red);
- framePen.setWidthF(2.0);
+ textColor = palette.highlightedText().color();
+ textBgColor = palette.highlight().color();
+ framePen.setColor(textBgColor);
+ paintColor.setRed(qMin(paintColor.red() * 2, 255));
}
else {
- framePen.setColor(paintColor.darker());
+ textColor = palette.text().color();
+ textBgColor = palette.window().color();
+ textBgColor.setAlpha(200);
+ framePen.setColor(m_paintColor.darker());
}
-
const QRectF exposed = option->exposedRect;
- painter->setClipRect(exposed);
- painter->fillRect(exposed, paintColor);
- painter->setWorldMatrixEnabled(false);;
- const QRectF mapped = painter->worldTransform().mapRect(rect());
+ const QTransform transformation = painter->worldTransform();
+ const QRectF mappedExposed = transformation.mapRect(exposed);
+ const QRectF mapped = transformation.mapRect(rect());
+ painter->setWorldTransform(QTransform());
+ QPainterPath p;
+ p.addRect(mappedExposed);
+ QPainterPath q;
+ q.addRoundedRect(mapped, 3, 3);
+ painter->setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform, false);
+ painter->setClipPath(p.intersected(q));
+ painter->setPen(Qt::NoPen);
+ painter->fillRect(mappedExposed, paintColor);
+ painter->setPen(m_paintColor.darker());
// draw thumbnails
if (KdenliveSettings::videothumbnails() && !isAudioOnly()) {
- QPen pen = painter->pen();
- pen.setColor(QColor(255, 255, 255, 150));
- painter->setPen(pen);
+ QRectF thumbRect;
if ((m_clipType == IMAGE || m_clipType == TEXT) && !m_startPix.isNull()) {
- const QPointF top = mapped.topRight() - QPointF(m_startPix.width() - 1, 0);
- painter->drawPixmap(top, m_startPix);
- QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
- painter->drawLine(l2);
+ if (thumbRect.isNull()) thumbRect = QRectF(0, 0, mapped.height() / m_startPix.height() * m_startPix.width(), mapped.height());
+ thumbRect.moveTopRight(mapped.topRight());
+ painter->drawPixmap(thumbRect, m_startPix, m_startPix.rect());
+ //const QPointF top = mapped.topRight() - QPointF(m_startPix.width() - 1, 0);
+ //painter->drawPixmap(top, m_startPix);
+ //QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
+ //painter->drawLine(l2);
} else if (!m_endPix.isNull()) {
- const QPointF top = mapped.topRight() - QPointF(m_endPix.width() - 1, 0);
- painter->drawPixmap(top, m_endPix);
- QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
- painter->drawLine(l2);
+ if (thumbRect.isNull()) thumbRect = QRectF(0, 0, mapped.height() / m_endPix.height() * m_endPix.width(), mapped.height());
+ thumbRect.moveTopRight(mapped.topRight());
+ painter->drawPixmap(thumbRect, m_endPix, m_endPix.rect());
+ //const QPointF top = mapped.topRight() - QPointF(m_endPix.width() - 1, 0);
+ //painter->drawPixmap(top, m_endPix);
+ //QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
+ //painter->drawLine(l2);
}
if (!m_startPix.isNull()) {
- painter->drawPixmap(mapped.topLeft(), m_startPix);
- QLineF l2(mapped.left() + m_startPix.width(), mapped.top(), mapped.left() + m_startPix.width(), mapped.bottom());
- painter->drawLine(l2);
+ if (thumbRect.isNull()) thumbRect = QRectF(0, 0, mapped.height() / m_startPix.height() * m_startPix.width(), mapped.height());
+ thumbRect.moveTopLeft(mapped.topLeft());
+ painter->drawPixmap(thumbRect, m_startPix, m_startPix.rect());
+ //painter->drawPixmap(mapped.topLeft(), m_startPix);
+ //QLineF l2(mapped.left() + m_startPix.width(), mapped.top(), mapped.left() + m_startPix.width(), mapped.bottom());
+ //painter->drawLine(l2);
}
// if we are in full zoom, paint thumbnail for every frame
- if (m_clip->thumbProducer() && clipType() != COLOR && clipType() != AUDIO && !m_audioOnly && painter->worldTransform().m11() == FRAME_SIZE) {
+ if (m_clip->thumbProducer() && clipType() != COLOR && clipType() != AUDIO && !m_audioOnly && transformation.m11() == FRAME_SIZE) {
int offset = (m_info.startPos - m_info.cropStart).frames(m_fps);
int left = qMax((int) m_info.cropStart.frames(m_fps) + 1, (int) mapToScene(exposed.left(), 0).x() - offset);
int right = qMin((int)(m_info.cropStart + m_info.cropDuration).frames(m_fps) - 1, (int) mapToScene(exposed.right(), 0).x() - offset);
QPointF startPos = mapped.topLeft();
int startOffset = m_info.cropStart.frames(m_fps);
if (clipType() == IMAGE || clipType() == TEXT) {
- for (int i = left; i <= right; i++) {
+ for (int i = left; i <= right; ++i) {
painter->drawPixmap(startPos + QPointF(FRAME_SIZE *(i - startOffset), 0), m_startPix);
}
}
else {
#if KDE_IS_VERSION(4,5,0)
if (m_clip && m_clip->thumbProducer()) {
- QString path = m_clip->fileURL().path() + "_";
+ QString path = m_clip->fileURL().path() + '_';
QImage img;
QPen pen(Qt::white);
pen.setStyle(Qt::DotLine);
- painter->setPen(pen);
QList <int> missing;
- for (int i = left; i <= right; i++) {
+ for (int i = left; i <= right; ++i) {
img = m_clip->thumbProducer()->findCachedThumb(path + QString::number(i));
QPointF xpos = startPos + QPointF(FRAME_SIZE *(i - startOffset), 0);
if (img.isNull()) missing << i;
- else painter->drawImage(xpos, img);
+ else {
+ painter->drawImage(xpos, img);
+ }
painter->drawLine(xpos, xpos + QPointF(0, mapped.height()));
}
if (!missing.isEmpty()) {
#endif
}
}
- painter->setPen(Qt::black);
}
-
// draw audio thumbnails
if (KdenliveSettings::audiothumbnails() && m_speed == 1.0 && !isVideoOnly() && ((m_clipType == AV && (exposed.bottom() > (rect().height() / 2) || isAudioOnly())) || m_clipType == AUDIO) && m_audioThumbReady) {
mappedRect.setTop(mappedRect.bottom() - mapped.height() / 2);
} else mappedRect = mapped;
- double scale = painter->worldTransform().m11();
+ double scale = transformation.m11();
int channels = 0;
if (isEnabled() && m_clip) channels = m_clip->getProperty("channels").toInt();
if (scale != m_framePixelWidth)
m_audioThumbCachePic.clear();
double cropLeft = m_info.cropStart.frames(m_fps);
const int clipStart = mappedRect.x();
- const int mappedStartPixel = painter->worldTransform().map(QPointF(startpixel + cropLeft, 0)).x() - clipStart;
- const int mappedEndPixel = painter->worldTransform().map(QPointF(endpixel + cropLeft, 0)).x() - clipStart;
+ const int mappedStartPixel = transformation.map(QPointF(startpixel + cropLeft, 0)).x() - clipStart;
+ const int mappedEndPixel = transformation.map(QPointF(endpixel + cropLeft, 0)).x() - clipStart;
cropLeft = cropLeft * scale;
if (channels >= 1) {
- emit prepareAudioThumb(scale, mappedStartPixel, mappedEndPixel, channels);
+ emit prepareAudioThumb(scale, mappedStartPixel, mappedEndPixel, channels, (int) (mappedRect.height() + 0.5));
}
-
+ QRectF pixmapRect(0, mappedRect.y(), 100, mappedRect.height());
for (int startCache = mappedStartPixel - (mappedStartPixel) % 100; startCache < mappedEndPixel; startCache += 100) {
- if (m_audioThumbCachePic.contains(startCache) && !m_audioThumbCachePic[startCache].isNull())
- painter->drawPixmap(clipStart + startCache - cropLeft, mappedRect.y(), m_audioThumbCachePic[startCache]);
+ if (!m_audioThumbCachePic.value(startCache).isNull()) {
+ //painter->drawPixmap(clipStart + startCache - cropLeft, mappedRect.y(), m_audioThumbCachePic.value(startCache));
+ QPixmap pix(m_audioThumbCachePic.value(startCache));
+ pixmapRect.moveLeft(clipStart + startCache - cropLeft);
+ painter->drawPixmap(pixmapRect, pix, pix.rect());
+ }
}
}
+
+ if (m_isMainSelectedClip) {
+ framePen.setColor(Qt::red);
+ textBgColor = Qt::red;
+ }
// only paint details if clip is big enough
if (mapped.width() > 20) {
// Draw effects names
if (!m_effectNames.isEmpty() && mapped.width() > 40) {
QRectF txtBounding = painter->boundingRect(mapped, Qt::AlignLeft | Qt::AlignTop, m_effectNames);
- QColor bgColor;
+ QColor bColor = palette.window().color();
+ QColor tColor = palette.text().color();
+ tColor.setAlpha(220);
if (m_timeLine && m_timeLine->state() == QTimeLine::Running) {
qreal value = m_timeLine->currentValue();
txtBounding.setWidth(txtBounding.width() * value);
- bgColor.setRgb(50 + 200 *(1.0 - value), 50, 50, 100 + 50 * value);
- } else bgColor.setRgb(50, 50, 90, 180);
-
- QPainterPath rounded;
- rounded.moveTo(txtBounding.bottomRight());
- rounded.arcTo(txtBounding.right() - txtBounding.height() - 2, txtBounding.top() - txtBounding.height(), txtBounding.height() * 2, txtBounding.height() * 2, 270, 90);
- rounded.lineTo(txtBounding.topLeft());
- rounded.lineTo(txtBounding.bottomLeft());
- painter->fillPath(rounded, bgColor);
- painter->setPen(Qt::lightGray);
- painter->drawText(txtBounding.adjusted(1, 0, 1, 0), Qt::AlignCenter, m_effectNames);
+ bColor.setAlpha(100 + 50 * value);
+ };
+
+ painter->setBrush(bColor);
+ painter->setPen(Qt::NoPen);
+ painter->drawRoundedRect(txtBounding.adjusted(-1, -2, 4, -1), 3, 3);
+ painter->setPen(tColor);
+ painter->drawText(txtBounding.adjusted(2, 0, 1, -1), Qt::AlignCenter, m_effectNames);
}
- const QRectF txtBounding2 = painter->boundingRect(mapped, Qt::AlignHCenter | Qt::AlignVCenter, ' ' + m_clipName + ' ');
- painter->setBrush(framePen.color());
+ // Draw clip name
+ const QRectF txtBounding2 = painter->boundingRect(mapped, Qt::AlignRight | Qt::AlignTop, m_clipName + ' ').adjusted(0, -1, 0, -1);
painter->setPen(Qt::NoPen);
- painter->drawRoundedRect(txtBounding2, 3, 3);
+ painter->fillRect(txtBounding2.adjusted(-3, 0, 0, 0), textBgColor);
painter->setBrush(QBrush(Qt::NoBrush));
-
+ painter->setPen(textColor);
if (m_videoOnly) {
painter->drawPixmap(txtBounding2.topLeft() - QPointF(17, -1), m_videoPix);
} else if (m_audioOnly) {
painter->drawPixmap(txtBounding2.topLeft() - QPointF(17, -1), m_audioPix);
}
- painter->setPen(Qt::white);
- painter->drawText(txtBounding2, Qt::AlignCenter, m_clipName);
+ painter->drawText(txtBounding2, Qt::AlignLeft, m_clipName);
// draw markers
double framepos;
QBrush markerBrush(QColor(120, 120, 0, 140));
QPen pen = painter->pen();
- pen.setColor(QColor(255, 255, 255, 200));
- pen.setStyle(Qt::DotLine);
for (; it != markers.end(); ++it) {
pos = GenTime((int)((*it).time().frames(m_fps) / qAbs(m_speed) + 0.5), m_fps) - cropStart();
if (pos > GenTime()) {
if (pos > cropDuration()) break;
QLineF l(rect().x() + pos.frames(m_fps), rect().y(), rect().x() + pos.frames(m_fps), rect().bottom());
- QLineF l2 = painter->worldTransform().map(l);
+ QLineF l2 = transformation.map(l);
+ pen.setColor(CommentedTime::markerColor((*it).markerType()));
+ pen.setStyle(Qt::DotLine);
painter->setPen(pen);
painter->drawLine(l2);
if (KdenliveSettings::showmarkers()) {
framepos = rect().x() + pos.frames(m_fps);
- const QRectF r1(framepos + 0.04, 10, rect().width() - framepos - 2, rect().height() - 10);
- const QRectF r2 = painter->worldTransform().mapRect(r1);
+ const QRectF r1(framepos + 0.04, rect().height()/3, rect().width() - framepos - 2, rect().height() / 2);
+ const QRectF r2 = transformation.mapRect(r1);
const QRectF txtBounding3 = painter->boundingRect(r2, Qt::AlignLeft | Qt::AlignTop, ' ' + (*it).comment() + ' ');
painter->setBrush(markerBrush);
- painter->setPen(Qt::NoPen);
- painter->drawRoundedRect(txtBounding3, 3, 3);
- painter->setBrush(QBrush(Qt::NoBrush));
+ pen.setStyle(Qt::SolidLine);
+ painter->setPen(pen);
+ painter->drawRect(txtBounding3);
+ painter->setBrush(Qt::NoBrush);
painter->setPen(Qt::white);
painter->drawText(txtBounding3, Qt::AlignCenter, (*it).comment());
}
fadeInPath.lineTo(0, rect().height());
fadeInPath.lineTo(m_startFade, 0);
fadeInPath.closeSubpath();
- QPainterPath f1 = painter->worldTransform().map(fadeInPath);
+ QPainterPath f1 = transformation.map(fadeInPath);
painter->fillPath(f1/*.intersected(resultClipPath)*/, fades);
/*if (isSelected()) {
QLineF l(m_startFade * scale, 0, 0, itemHeight);
fadeOutPath.lineTo(rect().width(), rect().height());
fadeOutPath.lineTo(rect().width() - m_endFade, 0);
fadeOutPath.closeSubpath();
- QPainterPath f1 = painter->worldTransform().map(fadeOutPath);
+ QPainterPath f1 = transformation.map(fadeOutPath);
painter->fillPath(f1/*.intersected(resultClipPath)*/, fades);
/*if (isSelected()) {
QLineF l(itemWidth - m_endFade * scale, 0, itemWidth, itemHeight);
painter->setPen(QPen(Qt::lightGray));
// draw effect or transition keyframes
- drawKeyFrames(painter, m_limitedKeyFrames);
+ drawKeyFrames(painter, transformation, m_limitedKeyFrames);
}
// draw clip border
// expand clip rect to allow correct painting of clip border
painter->setClipping(false);
+ painter->setRenderHint(QPainter::Antialiasing, true);
+ framePen.setWidthF(1.5);
painter->setPen(framePen);
- if (isSelected() || (parentItem() && parentItem()->isSelected())) {
- painter->drawRect(mapped.adjusted(0.5, 0.5, -0.5, -0.5));
- }
- else {
- painter->drawRect(mapped.adjusted(0, 0, -0.5, 0));
- }
+ painter->drawRoundedRect(mapped.adjusted(0.5, 0, -0.5, 0), 3, 3);
}
-OPERATIONTYPE ClipItem::operationMode(QPointF pos)
+OPERATIONTYPE ClipItem::operationMode(const QPointF &pos)
{
if (isItemLocked()) return NONE;
const double scale = projectScene()->scale().x();
}
QRectF rect = sceneBoundingRect();
int addtransitionOffset = 10;
- // Don't allow add transition if track height is very small
- if (rect.height() < 30) addtransitionOffset = 0;
+ // Don't allow add transition if track height is very small. No transitions for audio only clips
+ if (rect.height() < 30 || isAudioOnly() || m_clipType == AUDIO) addtransitionOffset = 0;
if (qAbs((int)(pos.x() - (rect.x() + m_startFade))) < maximumOffset && qAbs((int)(pos.y() - rect.y())) < 6) {
return FADEIN;
- } else if (pos.x() - rect.x() < maximumOffset && (rect.bottom() - pos.y() > addtransitionOffset)) {
+ } else if ((pos.x() <= rect.x() + rect.width() / 2) && pos.x() - rect.x() < maximumOffset && (rect.bottom() - pos.y() > addtransitionOffset)) {
+ // If we are in a group, allow resize only if all clips start at same position
+ if (parentItem()) {
+ QGraphicsItemGroup *dragGroup = static_cast <QGraphicsItemGroup *>(parentItem());
+ QList<QGraphicsItem *> list = dragGroup->childItems();
+ for (int i = 0; i < list.count(); ++i) {
+ if (list.at(i)->type() == AVWIDGET) {
+ ClipItem *c = static_cast <ClipItem*>(list.at(i));
+ if (c->startPos() != startPos()) return MOVE;
+ }
+ }
+ }
return RESIZESTART;
} else if (qAbs((int)(pos.x() - (rect.x() + rect.width() - m_endFade))) < maximumOffset && qAbs((int)(pos.y() - rect.y())) < 6) {
return FADEOUT;
- } else if ((rect.right() - pos.x() < maximumOffset) && (rect.bottom() - pos.y() > addtransitionOffset)) {
+ } else if ((pos.x() >= rect.x() + rect.width() / 2) && (rect.right() - pos.x() < maximumOffset) && (rect.bottom() - pos.y() > addtransitionOffset)) {
+ // If we are in a group, allow resize only if all clips end at same position
+ if (parentItem()) {
+ QGraphicsItemGroup *dragGroup = static_cast <QGraphicsItemGroup *>(parentItem());
+ QList<QGraphicsItem *> list = dragGroup->childItems();
+ for (int i = 0; i < list.count(); ++i) {
+ if (list.at(i)->type() == AVWIDGET) {
+ ClipItem *c = static_cast <ClipItem*>(list.at(i));
+ if (c->endPos() != endPos()) return MOVE;
+ }
+ }
+ }
return RESIZEEND;
} else if ((pos.x() - rect.x() < 16 / scale) && (rect.bottom() - pos.y() <= addtransitionOffset)) {
return TRANSITIONSTART;
QList <GenTime> ClipItem::snapMarkers() const
{
QList < GenTime > snaps;
- QList < GenTime > markers = baseClip()->snapMarkers();
+ if (!m_clip) return snaps;
+ QList < GenTime > markers = m_clip->snapMarkers();
GenTime pos;
- for (int i = 0; i < markers.size(); i++) {
+ for (int i = 0; i < markers.size(); ++i) {
pos = GenTime((int)(markers.at(i).frames(m_fps) / qAbs(m_speed) + 0.5), m_fps) - cropStart();
if (pos > GenTime()) {
if (pos > cropDuration()) break;
// Returns this clip's markers (with comments) converted to timeline-space
// snap points: each marker time is scaled by the playback speed, shifted by
// cropStart()/startPos(), and markers falling outside the visible crop region
// are dropped.
QList <CommentedTime> ClipItem::commentedSnapMarkers() const
{
QList < CommentedTime > snaps;
- QList < CommentedTime > markers = baseClip()->commentedSnapMarkers();
+ // Guard against a clip item without an attached DocClipBase.
+ if (!m_clip) return snaps;
+ QList < CommentedTime > markers = m_clip->commentedSnapMarkers();
GenTime pos;
- for (int i = 0; i < markers.size(); i++) {
+ for (int i = 0; i < markers.size(); ++i) {
// Scale marker frame position by |speed| (slow/fast motion), round to the
// nearest frame, then make it relative to the clip's in point.
pos = GenTime((int)(markers.at(i).time().frames(m_fps) / qAbs(m_speed) + 0.5), m_fps) - cropStart();
if (pos > GenTime()) {
// NOTE(review): the break assumes markers are sorted by time — confirm
// DocClipBase::commentedSnapMarkers() guarantees ascending order.
if (pos > cropDuration()) break;
- else snaps.append(CommentedTime(pos + startPos(), markers.at(i).comment()));
+ // Preserve the marker type in the timeline-space copy.
+ else snaps.append(CommentedTime(pos + startPos(), markers.at(i).comment(), markers.at(i).markerType()));
}
}
return snaps;
}
-void ClipItem::slotPrepareAudioThumb(double pixelForOneFrame, int startpixel, int endpixel, int channels)
+void ClipItem::slotPrepareAudioThumb(double pixelForOneFrame, int startpixel, int endpixel, int channels, int pixelHeight)
{
- QRectF re = sceneBoundingRect();
- if (m_clipType == AV && !isAudioOnly()) re.setTop(re.y() + re.height() / 2);
+ // Bail out, if caller provided invalid data
+ if (channels <= 0) {
+ kWarning() << "Unable to draw image with " << channels << "number of channels";
+ return;
+ }
+ int factor = 64;
+ if (KdenliveSettings::normaliseaudiothumbs()) {
+ factor = m_clip->getProperty("audio_max").toInt();
+ }
//kDebug() << "// PREP AUDIO THMB FRMO : scale:" << pixelForOneFrame<< ", from: " << startpixel << ", to: " << endpixel;
//if ( (!audioThumbWasDrawn || framePixelWidth!=pixelForOneFrame ) && !baseClip()->audioFrameChache.isEmpty()){
+ bool fullAreaDraw = pixelForOneFrame < 10;
+ bool simplifiedAudio = !KdenliveSettings::displayallchannels();
+ QPen audiopen;
+ audiopen.setWidth(0);
+ if (simplifiedAudio) channels = 1;
+ int channelHeight = pixelHeight / channels;
+ QMap<int, QPainterPath > positiveChannelPaths;
+ QMap<int, QPainterPath > negativeChannelPaths;
for (int startCache = startpixel - startpixel % 100; startCache < endpixel; startCache += 100) {
- //kDebug() << "creating " << startCache;
- //if (framePixelWidth!=pixelForOneFrame ||
if (m_framePixelWidth == pixelForOneFrame && m_audioThumbCachePic.contains(startCache))
continue;
- if (m_audioThumbCachePic[startCache].isNull() || m_framePixelWidth != pixelForOneFrame) {
- m_audioThumbCachePic[startCache] = QPixmap(100, (int)(re.height()));
- m_audioThumbCachePic[startCache].fill(QColor(180, 180, 200, 140));
+ if (m_audioThumbCachePic.value(startCache).isNull() || m_framePixelWidth != pixelForOneFrame) {
+ QPixmap pix(100, pixelHeight);
+ pix.fill(QColor(180, 180, 180, 150));
+ m_audioThumbCachePic[startCache] = pix;
}
- bool fullAreaDraw = pixelForOneFrame < 10;
- QMap<int, QPainterPath > positiveChannelPaths;
- QMap<int, QPainterPath > negativeChannelPaths;
+ positiveChannelPaths.clear();
+ negativeChannelPaths.clear();
+
QPainter pixpainter(&m_audioThumbCachePic[startCache]);
- QPen audiopen;
- audiopen.setWidth(0);
- pixpainter.setPen(audiopen);
- //pixpainter.setRenderHint(QPainter::Antialiasing,true);
- //pixpainter.drawLine(0,0,100,re.height());
- // Bail out, if caller provided invalid data
- if (channels <= 0) {
- kWarning() << "Unable to draw image with " << channels << "number of channels";
- return;
- }
- int channelHeight = m_audioThumbCachePic[startCache].height() / channels;
-
- for (int i = 0; i < channels; i++) {
-
- positiveChannelPaths[i].moveTo(0, channelHeight*i + channelHeight / 2);
- negativeChannelPaths[i].moveTo(0, channelHeight*i + channelHeight / 2);
+ for (int i = 0; i < channels; ++i) {
+ if (simplifiedAudio) {
+ positiveChannelPaths[i].moveTo(-1, channelHeight);
+ }
+ else if (fullAreaDraw) {
+ positiveChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
+ negativeChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
+ }
+ else {
+ positiveChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
+ audiopen.setColor(QColor(60, 60, 60, 50));
+ pixpainter.setPen(audiopen);
+ pixpainter.drawLine(0, channelHeight*i + channelHeight / 2, 100, channelHeight*i + channelHeight / 2);
+ }
}
for (int samples = 0; samples <= 100; samples++) {
int sample = (int)((frame - (int)(frame)) * 20); // AUDIO_FRAME_SIZE
if (frame < 0 || sample < 0 || sample > 19)
continue;
- QMap<int, QByteArray> frame_channel_data = baseClip()->m_audioFrameCache[(int)frame];
-
- for (int channel = 0; channel < channels && frame_channel_data[channel].size() > 0; channel++) {
+ const QMap<int, QByteArray> frame_channel_data = baseClip()->audioFrameCache.value((int)frame);
+ for (int channel = 0; channel < channels && !frame_channel_data.value(channel).isEmpty(); channel++) {
int y = channelHeight * channel + channelHeight / 2;
- int delta = (int)(frame_channel_data[channel][sample] - 127 / 2) * channelHeight / 64;
- if (fullAreaDraw) {
- positiveChannelPaths[channel].lineTo(samples, 0.1 + y + qAbs(delta));
- negativeChannelPaths[channel].lineTo(samples, 0.1 + y - qAbs(delta));
+ if (simplifiedAudio) {
+ double delta = qAbs((frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / factor);
+ positiveChannelPaths[channel].lineTo(samples, channelHeight - delta);
+ } else if (fullAreaDraw) {
+ double delta = qAbs((frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / (2 * factor));
+ positiveChannelPaths[channel].lineTo(samples, y + delta);
+ negativeChannelPaths[channel].lineTo(samples, y - delta);
} else {
- positiveChannelPaths[channel].lineTo(samples, 0.1 + y + delta);
- negativeChannelPaths[channel].lineTo(samples, 0.1 + y - delta);
+ double delta = (frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / (2 * factor);
+ positiveChannelPaths[channel].lineTo(samples, y + delta);
}
}
- for (int channel = 0; channel < channels ; channel++)
- if (fullAreaDraw && samples == 100) {
- positiveChannelPaths[channel].lineTo(samples, channelHeight*channel + channelHeight / 2);
- negativeChannelPaths[channel].lineTo(samples, channelHeight*channel + channelHeight / 2);
- positiveChannelPaths[channel].lineTo(0, channelHeight*channel + channelHeight / 2);
- negativeChannelPaths[channel].lineTo(0, channelHeight*channel + channelHeight / 2);
- }
-
}
- pixpainter.setPen(QPen(QColor(0, 0, 0)));
- pixpainter.setBrush(QBrush(QColor(60, 60, 60)));
-
- for (int i = 0; i < channels; i++) {
+ for (int channel = 0; channel < channels; channel++) {
+ if (simplifiedAudio) {
+ positiveChannelPaths[channel].lineTo(101, channelHeight);
+ } else if (fullAreaDraw) {
+ int y = channelHeight * channel + channelHeight / 2;
+ positiveChannelPaths[channel].lineTo(101, y);
+ negativeChannelPaths[channel].lineTo(101, y);
+ }
+ }
+ if (fullAreaDraw || simplifiedAudio) {
+ audiopen.setColor(QColor(80, 80, 80, 200));
+ pixpainter.setPen(audiopen);
+ pixpainter.setBrush(QBrush(QColor(120, 120, 120, 200)));
+ }
+ else {
+ audiopen.setColor(QColor(60, 60, 60, 100));
+ pixpainter.setPen(audiopen);
+ pixpainter.setBrush(Qt::NoBrush);
+ }
+ pixpainter.setRenderHint(QPainter::Antialiasing, false);
+ for (int i = 0; i < channels; ++i) {
if (fullAreaDraw) {
- //pixpainter.fillPath(positiveChannelPaths[i].united(negativeChannelPaths[i]),QBrush(Qt::SolidPattern));//or singleif looks better
- pixpainter.drawPath(positiveChannelPaths[i].united(negativeChannelPaths[i]));//or singleif looks better
+ pixpainter.drawPath(positiveChannelPaths[i].united(negativeChannelPaths.value(i)));
} else
- pixpainter.drawPath(positiveChannelPaths[i]);
+ pixpainter.drawPath(positiveChannelPaths.value(i));
}
}
- //audioThumbWasDrawn=true;
m_framePixelWidth = pixelForOneFrame;
-
- //}
}
int ClipItem::fadeIn() const
}
*/
-void ClipItem::resizeStart(int posx, bool /*size*/)
+void ClipItem::resizeStart(int posx, bool /*size*/, bool emitChange)
{
bool sizeLimit = false;
if (clipType() != IMAGE && clipType() != COLOR && clipType() != TEXT) {
m_startThumbTimer.start(150);
}
}
+ if (emitChange) slotUpdateRange();
}
-void ClipItem::resizeEnd(int posx)
+// Emit updateRange() to notify listeners that this clip's in/out range
+// changed — but only when this is the main selected clip, presumably so a
+// multi-clip (grouped) resize triggers the signal only once; confirm with
+// the updateRange() receivers.
+void ClipItem::slotUpdateRange()
+{
+ if (m_isMainSelectedClip) emit updateRange();
+}
+
+void ClipItem::resizeEnd(int posx, bool emitChange)
{
const int max = (startPos() - cropStart() + maxDuration()).frames(m_fps);
if (posx > max && maxDuration() != GenTime()) posx = max;
m_endThumbTimer.start(150);
}
}
+ if (emitChange) slotUpdateRange();
}
//virtual
QVariant ClipItem::itemChange(GraphicsItemChange change, const QVariant &value)
{
if (change == QGraphicsItem::ItemSelectedChange) {
- if (value.toBool()) setZValue(10);
- else setZValue(2);
+ if (value.toBool())
+ setZValue(10);
+ else
+ setZValue(2);
}
if (change == ItemPositionChange && scene()) {
// calculate new position.
int xpos = projectScene()->getSnapPointForPos((int) newPos.x(), KdenliveSettings::snaptopoints());
xpos = qMax(xpos, 0);
newPos.setX(xpos);
- int newTrack = newPos.y() / KdenliveSettings::trackheight();
+ // Warning: newPos gives a position relative to the click event, so hack to get absolute pos
+ int yOffset = property("y_absolute").toInt() + newPos.y();
+ int newTrack = yOffset / KdenliveSettings::trackheight();
newTrack = qMin(newTrack, projectScene()->tracksCount() - 1);
newTrack = qMax(newTrack, 0);
+ QStringList lockedTracks = property("locked_tracks").toStringList();
+ if (lockedTracks.contains(QString::number(newTrack))) {
+ // Trying to move to a locked track
+ return pos();
+ }
newPos.setY((int)(newTrack * KdenliveSettings::trackheight() + 1));
// Only one clip is moving
QRectF sceneShape = rect();
bool forwardMove = newPos.x() > pos().x();
int offset = 0;
if (!items.isEmpty()) {
- for (int i = 0; i < items.count(); i++) {
+ for (int i = 0; i < items.count(); ++i) {
if (!items.at(i)->isEnabled()) continue;
if (items.at(i)->type() == type()) {
// Collision!
//kDebug()<<"// ITEM NEW POS: "<<newPos.x()<<", mapped: "<<mapToScene(newPos.x(), 0).x();
return newPos;
}
+ if (change == ItemParentChange) {
+ QGraphicsItem* parent = value.value<QGraphicsItem*>();
+ if (parent) m_paintColor = m_baseColor.lighter(135);
+ else m_paintColor = m_baseColor;
+ }
return QGraphicsItem::itemChange(change, value);
}
return m_effectList.effectNames();
}
-QDomElement ClipItem::effectAt(int ix) const
+// 0-based effect lookup (renamed from effectAt). Returns a deep copy
+// (cloneNode) of the effect at list position ix so callers cannot mutate the
+// stored effect; returns a null QDomElement when ix is out of range.
+// Contrast with effectAtIndex(), which uses 1-based indices.
+QDomElement ClipItem::effect(int ix) const
+{
+ if (ix >= m_effectList.count() || ix < 0) return QDomElement();
+ return m_effectList.at(ix).cloneNode().toElement();
+}
+
+// 1-based effect lookup (valid range 1..count(), presumably matching the
+// "kdenlive_ix" attribute numbering — confirm). Returns a deep copy of the
+// effect, or a null QDomElement when ix is out of range.
+QDomElement ClipItem::effectAtIndex(int ix) const
{
if (ix > m_effectList.count() || ix <= 0) return QDomElement();
return m_effectList.itemFromIndex(ix).cloneNode().toElement();
}
-QDomElement ClipItem::getEffectAt(int ix) const
+// 1-based effect lookup (renamed from getEffectAt). Unlike effectAtIndex(),
+// this returns the live element WITHOUT cloning, so mutations through the
+// returned QDomElement affect the stored effect list directly.
+QDomElement ClipItem::getEffectAtIndex(int ix) const
{
if (ix > m_effectList.count() || ix <= 0) return QDomElement();
return m_effectList.itemFromIndex(ix);
}
-bool ClipItem::updateEffect(QDomElement effect)
+void ClipItem::updateEffect(QDomElement effect)
{
//kDebug() << "CHange EFFECT AT: " << ix << ", CURR: " << m_effectList.at(ix).attribute("tag") << ", NEW: " << effect.attribute("tag");
m_effectList.updateEffect(effect);
r.setHeight(20);
update(r);
}
- return true;
+}
+
+// Thin forwarder: enable or disable the effects at the given indexes.
+// Both arguments are passed through unchanged; the exact semantics of the
+// 'disable' flag are defined by EffectsList::enableEffects().
+void ClipItem::enableEffects(QList <int> indexes, bool disable)
+{
+ m_effectList.enableEffects(indexes, disable);
+}
bool ClipItem::moveEffect(QDomElement effect, int ix)
int ix;
QDomElement insertedEffect;
if (!effect.hasAttribute("kdenlive_ix")) {
+ // effect dropped from effect list
ix = effectsCounter();
} else ix = effect.attribute("kdenlive_ix").toInt();
if (!m_effectList.isEmpty() && ix <= m_effectList.count()) {
// Update index to the real one
effect.setAttribute("kdenlive_ix", insertedEffect.attribute("kdenlive_ix"));
+ int effectIn;
+ int effectOut;
+
+ if (effect.attribute("tag") == "affine") {
+ // special case: the affine effect needs in / out points
+ effectIn = effect.attribute("in").toInt();
+ effectOut = effect.attribute("out").toInt();
+ }
+ else {
+ effectIn = EffectsList::parameter(effect, "in").toInt();
+ effectOut = EffectsList::parameter(effect, "out").toInt();
+ }
EffectsParameterList parameters;
parameters.addParam("tag", insertedEffect.attribute("tag"));
if (effectId.isEmpty()) effectId = insertedEffect.attribute("tag");
parameters.addParam("id", effectId);
- // special case: the affine effect needs in / out points
-
QDomNodeList params = insertedEffect.elementsByTagName("parameter");
int fade = 0;
bool needInOutSync = false;
- for (int i = 0; i < params.count(); i++) {
+
+ // check if it is a fade effect
+ if (effectId == "fadein") {
+ needRepaint = true;
+ if (m_effectList.hasEffect(QString(), "fade_from_black") == -1) {
+ fade = effectOut - effectIn;
+ }/* else {
+ QDomElement fadein = m_effectList.getEffectByTag(QString(), "fade_from_black");
+ if (fadein.attribute("name") == "out") fade += fadein.attribute("value").toInt();
+ else if (fadein.attribute("name") == "in") fade -= fadein.attribute("value").toInt();
+ }*/
+ } else if (effectId == "fade_from_black") {
+ needRepaint = true;
+ if (m_effectList.hasEffect(QString(), "fadein") == -1) {
+ fade = effectOut - effectIn;
+ }/* else {
+ QDomElement fadein = m_effectList.getEffectByTag(QString(), "fadein");
+ if (fadein.attribute("name") == "out") fade += fadein.attribute("value").toInt();
+ else if (fadein.attribute("name") == "in") fade -= fadein.attribute("value").toInt();
+ }*/
+ } else if (effectId == "fadeout") {
+ needRepaint = true;
+ if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
+ fade = effectIn - effectOut;
+ } /*else {
+ QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fade_to_black");
+ if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
+ else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
+ }*/
+ } else if (effectId == "fade_to_black") {
+ needRepaint = true;
+ if (m_effectList.hasEffect(QString(), "fadeout") == -1) {
+ fade = effectIn - effectOut;
+ }/* else {
+ QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fadeout");
+ if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
+ else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
+ }*/
+ }
+
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull()) {
if (e.attribute("type") == "geometry" && !e.hasAttribute("fixed")) {
needInOutSync = true;
}
if (e.attribute("type") == "simplekeyframe") {
- QStringList values = e.attribute("keyframes").split(";", QString::SkipEmptyParts);
+ QStringList values = e.attribute("keyframes").split(';', QString::SkipEmptyParts);
double factor = locale.toDouble(e.attribute("factor", "1"));
double offset = e.attribute("offset", "0").toDouble();
if (factor != 1 || offset != 0) {
for (int j = 0; j < values.count(); j++) {
QString pos = values.at(j).section(':', 0, 0);
double val = (locale.toDouble(values.at(j).section(':', 1, 1)) - offset) / factor;
- values[j] = pos + "=" + locale.toString(val);
+ values[j] = pos + '=' + locale.toString(val);
}
}
parameters.addParam(e.attribute("name"), values.join(";"));
} else if (e.attribute("factor", "1") == "1" && e.attribute("offset", "0") == "0") {
parameters.addParam(e.attribute("name"), e.attribute("value"));
- // check if it is a fade effect
- if (effectId == "fadein") {
- needRepaint = true;
- if (m_effectList.hasEffect(QString(), "fade_from_black") == -1) {
- if (e.attribute("name") == "out") fade += e.attribute("value").toInt();
- else if (e.attribute("name") == "in") fade -= e.attribute("value").toInt();
- } else {
- QDomElement fadein = m_effectList.getEffectByTag(QString(), "fade_from_black");
- if (fadein.attribute("name") == "out") fade += fadein.attribute("value").toInt();
- else if (fadein.attribute("name") == "in") fade -= fadein.attribute("value").toInt();
- }
- } else if (effectId == "fade_from_black") {
- needRepaint = true;
- if (m_effectList.hasEffect(QString(), "fadein") == -1) {
- if (e.attribute("name") == "out") fade += e.attribute("value").toInt();
- else if (e.attribute("name") == "in") fade -= e.attribute("value").toInt();
- } else {
- QDomElement fadein = m_effectList.getEffectByTag(QString(), "fadein");
- if (fadein.attribute("name") == "out") fade += fadein.attribute("value").toInt();
- else if (fadein.attribute("name") == "in") fade -= fadein.attribute("value").toInt();
- }
- } else if (effectId == "fadeout") {
- needRepaint = true;
- if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
- if (e.attribute("name") == "out") fade -= e.attribute("value").toInt();
- else if (e.attribute("name") == "in") fade += e.attribute("value").toInt();
- } else {
- QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fade_to_black");
- if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
- else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
- }
- } else if (effectId == "fade_to_black") {
- needRepaint = true;
- if (m_effectList.hasEffect(QString(), "fadeout") == -1) {
- if (e.attribute("name") == "out") fade -= e.attribute("value").toInt();
- else if (e.attribute("name") == "in") fade += e.attribute("value").toInt();
- } else {
- QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fadeout");
- if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
- else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
- }
- }
} else {
double fact;
if (e.attribute("factor").contains('%')) {
}
}
if (needInOutSync) {
- parameters.addParam("in", QString::number(cropStart().frames(m_fps)));
- parameters.addParam("out", QString::number((cropStart() + cropDuration()).frames(m_fps) - 1));
+ parameters.addParam("in", QString::number((int) cropStart().frames(m_fps)));
+ parameters.addParam("out", QString::number((int) (cropStart() + cropDuration()).frames(m_fps) - 1));
parameters.addParam("_sync_in_out", "1");
}
m_effectNames = m_effectList.effectNames().join(" / ");
else if (fade < 0) m_endFade = -fade;
if (m_selectedEffect == -1) {
- setSelectedEffect(0);
+ setSelectedEffect(1);
} else if (m_selectedEffect == ix - 1) setSelectedEffect(m_selectedEffect);
if (needRepaint) update(boundingRect());
/*if (animate) {
return parameters;
}
-void ClipItem::deleteEffect(QString index)
+void ClipItem::deleteEffect(const QString &index)
{
bool needRepaint = false;
int ix = index.toInt();
QDomElement effect = m_effectList.itemFromIndex(ix);
QString effectId = effect.attribute("id");
if ((effectId == "fadein" && hasEffect(QString(), "fade_from_black") == -1) ||
- (effectId == "fade_from_black" && hasEffect(QString(), "fadein") == -1)) {
+ (effectId == "fade_from_black" && hasEffect(QString(), "fadein") == -1)) {
m_startFade = 0;
needRepaint = true;
} else if ((effectId == "fadeout" && hasEffect(QString(), "fade_to_black") == -1) ||
- (effectId == "fade_to_black" && hasEffect(QString(), "fadeout") == -1)) {
+ (effectId == "fade_to_black" && hasEffect(QString(), "fadeout") == -1)) {
m_endFade = 0;
needRepaint = true;
} else if (EffectsList::hasKeyFrames(effect)) needRepaint = true;
m_effectList.removeAt(ix);
m_effectNames = m_effectList.effectNames().join(" / ");
- if (m_effectList.isEmpty() || m_selectedEffect + 1 == ix) {
+ if (m_effectList.isEmpty() || m_selectedEffect == ix) {
// Current effect was removed
- if (ix > m_effectList.count() - 1) {
- setSelectedEffect(m_effectList.count() - 1);
+ if (ix > m_effectList.count()) {
+ setSelectedEffect(m_effectList.count());
} else setSelectedEffect(ix);
}
- if (needRepaint) update(boundingRect());
- else {
+ if (needRepaint) {
+ update(boundingRect());
+ } else {
QRectF r = boundingRect();
r.setHeight(20);
update(r);
if (m_speed <= 0 && m_speed > -1)
m_speed = -1.0;
m_strobe = strobe;
- if (m_speed == 1.0) m_clipName = baseClip()->name();
- else m_clipName = baseClip()->name() + " - " + QString::number(speed * 100, 'f', 0) + '%';
+ if (m_speed == 1.0) m_clipName = m_clip->name();
+ else m_clipName = m_clip->name() + " - " + QString::number(speed * 100, 'f', 0) + '%';
m_info.cropStart = GenTime((int)(m_speedIndependantInfo.cropStart.frames(m_fps) / qAbs(m_speed) + 0.5), m_fps);
m_info.cropDuration = GenTime((int)(m_speedIndependantInfo.cropDuration.frames(m_fps) / qAbs(m_speed) + 0.5), m_fps);
//update();
return m_speedIndependantInfo;
}
+// Returns the first unused effect-group index for this clip: scans every
+// effect's "kdenlive_info" attribute and returns (max groupIndex + 1), or 0
+// when no effect carries a group index. Used when an effect group is dropped
+// onto the clip (see dropEvent) so the new group gets a fresh index.
+int ClipItem::nextFreeEffectGroupIndex() const
+{
+ int freeGroupIndex = 0;
+ for (int i = 0; i < m_effectList.count(); ++i) {
+ QDomElement effect = m_effectList.at(i);
+ EffectInfo effectInfo;
+ effectInfo.fromString(effect.attribute("kdenlive_info"));
+ if (effectInfo.groupIndex >= freeGroupIndex) {
+ freeGroupIndex = effectInfo.groupIndex + 1;
+ }
+ }
+ return freeGroupIndex;
+}
+
//virtual
// Handle an effect (or effect group) dropped onto this clip from the effect
// list. Only Qt::CopyAction drops carrying "kdenlive/effectslist" XML are
// processed; everything else falls through to the trailing 'else return;'.
void ClipItem::dropEvent(QGraphicsSceneDragDropEvent * event)
{
if (event->proposedAction() == Qt::CopyAction && scene() && !scene()->views().isEmpty()) {
- const QString effects = QString::fromUtf8(event->mimeData()->data("kdenlive/effectslist"));
- event->acceptProposedAction();
- QDomDocument doc;
- doc.setContent(effects, true);
- QDomElement e = doc.documentElement();
- if (e.tagName() == "list") {
- // dropped an effect group
- QDomNodeList effectlist = e.elementsByTagName("effect");
- for (int i = 0; i < effectlist.count(); i++) {
- effectlist.at(i).toElement().removeAttribute("kdenlive_ix");
- }
- } else {
- // single effect dropped
- e.removeAttribute("kdenlive_ix");
- }
- CustomTrackView *view = (CustomTrackView *) scene()->views()[0];
- if (view) view->slotAddEffect(e, m_info.startPos, track());
+ const QString effects = QString::fromUtf8(event->mimeData()->data("kdenlive/effectslist"));
+ event->acceptProposedAction();
+ QDomDocument doc;
+ doc.setContent(effects, true);
+ QDomElement e = doc.documentElement();
+ if (e.tagName() == "effectgroup") {
+ // dropped an effect group
+ QDomNodeList effectlist = e.elementsByTagName("effect");
+ // All effects of the dropped group get the same fresh group index so
+ // they stay grouped on this clip without clashing with existing groups.
+ int freeGroupIndex = nextFreeEffectGroupIndex();
+ EffectInfo effectInfo;
+ for (int i = 0; i < effectlist.count(); ++i) {
+ QDomElement effect = effectlist.at(i).toElement();
+ effectInfo.fromString(effect.attribute("kdenlive_info"));
+ effectInfo.groupIndex = freeGroupIndex;
+ effect.setAttribute("kdenlive_info", effectInfo.toString());
+ // Drop the stale index so the receiver assigns a fresh kdenlive_ix
+ // (presumably via the addEffect path — confirm).
+ effect.removeAttribute("kdenlive_ix");
+ }
+ } else {
+ // single effect dropped
+ e.removeAttribute("kdenlive_ix");
+ }
+ CustomTrackView *view = (CustomTrackView *) scene()->views().first();
+ if (view) view->slotDropEffect(this, e, m_info.startPos, track());
}
else return;
}
{
if (isItemLocked()) event->setAccepted(false);
else if (event->mimeData()->hasFormat("kdenlive/effectslist")) {
- event->acceptProposedAction();
+ event->acceptProposedAction();
} else event->setAccepted(false);
}
} else if (m_clipType == AUDIO) m_baseColor = QColor(141, 215, 166);
else m_baseColor = QColor(141, 166, 215);
}
+ if (parentItem())
+ m_paintColor = m_baseColor.lighter(135);
+ else
+ m_paintColor = m_baseColor;
m_audioThumbCachePic.clear();
}
effect.setAttribute("active_keyframe", pos);
m_editedKeyframe = pos;
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe")) {
QString kfr = e.attribute("keyframes");
newkfr.append(str);
} else if (!added) {
if (i == m_visibleParam)
- newkfr.append(QString::number(pos) + ":" + QString::number(val));
+ newkfr.append(QString::number(pos) + ':' + QString::number(val));
else
- newkfr.append(QString::number(pos) + ":" + locale.toString(newval));
+ newkfr.append(QString::number(pos) + ':' + locale.toString(newval));
if (kpos > pos) newkfr.append(str);
added = true;
} else newkfr.append(str);
}
if (!added) {
if (i == m_visibleParam)
- newkfr.append(QString::number(pos) + ":" + QString::number(val));
+ newkfr.append(QString::number(pos) + ':' + QString::number(val));
else
- newkfr.append(QString::number(pos) + ":" + e.attribute("default"));
+ newkfr.append(QString::number(pos) + ':' + e.attribute("default"));
}
e.setAttribute("keyframes", newkfr.join(";"));
}
QDomNodeList params = effect.elementsByTagName("parameter");
int start = cropStart().frames(m_fps);
int end = (cropStart() + cropDuration()).frames(m_fps) - 1;
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe")) {
QString kfr = e.attribute("keyframes");
newpos = qMax(newpos, start);
newpos = qMin(newpos, end);
if (i == m_visibleParam)
- newkfr.append(QString::number(newpos) + ":" + locale.toString(value));
+ newkfr.append(QString::number(newpos) + ':' + locale.toString(value));
else
- newkfr.append(QString::number(newpos) + ":" + str.section(':', 1, 1));
+ newkfr.append(QString::number(newpos) + ':' + str.section(':', 1, 1));
}
}
e.setAttribute("keyframes", newkfr.join(";"));
if (isAudioOnly())
return m_clip->audioProducer(track);
else if (isVideoOnly())
- return m_clip->videoProducer();
+ return m_clip->videoProducer(track);
else
return m_clip->getProducer(trackSpecific ? track : -1);
}
-QMap<int, QDomElement> ClipItem::adjustEffectsToDuration(int width, int height, ItemInfo oldInfo)
+QMap<int, QDomElement> ClipItem::adjustEffectsToDuration(int width, int height, const ItemInfo &oldInfo)
{
QMap<int, QDomElement> effects;
- for (int i = 0; i < m_effectList.count(); i++) {
+ for (int i = 0; i < m_effectList.count(); ++i) {
QDomElement effect = m_effectList.at(i);
if (effect.attribute("id").startsWith("fade")) {
if (id == "fade_from_black" || id == "fadein") {
if (in != cropStart().frames(m_fps)) {
effects[i] = effect.cloneNode().toElement();
- int diff = in - cropStart().frames(m_fps);
- in -= diff;
- out -= diff;
+ int duration = out - in;
+ in = cropStart().frames(m_fps);
+ out = in + duration;
EffectsList::setParameter(effect, "in", QString::number(in));
EffectsList::setParameter(effect, "out", QString::number(out));
}
effects[i] = effect.cloneNode().toElement();
EffectsList::setParameter(effect, "out", QString::number(clipEnd));
}
- if (effects.contains(i))
+ if (effects.contains(i)) {
setFadeIn(out - in);
+ }
} else {
if (out != clipEnd) {
effects[i] = effect.cloneNode().toElement();
if (in < cropStart().frames(m_fps)) {
if (!effects.contains(i))
effects[i] = effect.cloneNode().toElement();
- EffectsList::setParameter(effect, "in", QString::number(cropStart().frames(m_fps)));
+ EffectsList::setParameter(effect, "in", QString::number((int) cropStart().frames(m_fps)));
}
if (effects.contains(i))
setFadeOut(out - in);
int frame = EffectsList::parameter(effect, "frame").toInt();
EffectsList::setParameter(effect, "frame", QString::number(frame - diff));
continue;
+ } else if (effect.attribute("id") == "pan_zoom") {
+ effect.setAttribute("in", cropStart().frames(m_fps));
+ effect.setAttribute("out", (cropStart() + cropDuration()).frames(m_fps) - 1);
}
QDomNodeList params = effect.elementsByTagName("parameter");
} else if (type == "simplekeyframe" || type == "keyframe") {
if (!effects.contains(i))
effects[i] = effect.cloneNode().toElement();
- updateNormalKeyframes(param);
+ updateNormalKeyframes(param, oldInfo);
#ifdef USE_QJSON
} else if (type == "roto-spline") {
if (!effects.contains(i))
return effects;
}
-bool ClipItem::updateNormalKeyframes(QDomElement parameter)
+bool ClipItem::updateNormalKeyframes(QDomElement parameter, ItemInfo oldInfo)
{
int in = cropStart().frames(m_fps);
int out = (cropStart() + cropDuration()).frames(m_fps) - 1;
+ int oldin = oldInfo.cropStart.frames(m_fps);
QLocale locale;
+ bool keyFrameUpdated = false;
const QStringList data = parameter.attribute("keyframes").split(';', QString::SkipEmptyParts);
QMap <int, double> keyframes;
- foreach (QString keyframe, data)
- keyframes[keyframe.section(':', 0, 0).toInt()] = locale.toDouble(keyframe.section(':', 1, 1));
+ foreach (QString keyframe, data) {
+ int keyframepos = keyframe.section(':', 0, 0).toInt();
+ // if keyframe was at clip start, update it
+ if (keyframepos == oldin) {
+ keyframepos = in;
+ keyFrameUpdated = true;
+ }
+ keyframes[keyframepos] = locale.toDouble(keyframe.section(':', 1, 1));
+ }
QMap<int, double>::iterator i = keyframes.end();
relPos = (out - lastPos) / (qreal)(i.key() - lastPos + 1);
keyframes[out] = lastValue + (i.value() - lastValue) * relPos;
}
- }
+ }
lastPos = i.key();
lastValue = i.value();
if (endFound)
++i;
}
- if (startFound || endFound) {
+ if (startFound || endFound || keyFrameUpdated) {
QString newkfr;
QMap<int, double>::const_iterator k = keyframes.constBegin();
while (k != keyframes.constEnd()) {
void ClipItem::updateGeometryKeyframes(QDomElement effect, int paramIndex, int width, int height, ItemInfo oldInfo)
{
-
QDomElement param = effect.elementsByTagName("parameter").item(paramIndex).toElement();
int offset = oldInfo.cropStart.frames(m_fps);
QString data = param.attribute("value");
if (offset > 0) {
QStringList kfrs = data.split(';');
data.clear();
- foreach (QString keyframe, kfrs) {
+ foreach (const QString &keyframe, kfrs) {
if (keyframe.contains('=')) {
int pos = keyframe.section('=', 0, 0).toInt();
pos += offset;
- data.append(QString::number(pos) + "=" + keyframe.section('=', 1) + ";");
+ data.append(QString::number(pos) + '=' + keyframe.section('=', 1) + ";");
}
- else data.append(keyframe + ";");
+ else data.append(keyframe + ';');
}
}
Mlt::Geometry geometry(data.toUtf8().data(), oldInfo.cropDuration.frames(m_fps), width, height);
update();
}
+
#include "clipitem.moc"