static int FRAME_SIZE;
-ClipItem::ClipItem(DocClipBase *clip, ItemInfo info, double fps, double speed, int strobe, int frame_width, bool generateThumbs) :
- AbstractClipItem(info, QRectF(), fps),
- m_clip(clip),
- m_startFade(0),
- m_endFade(0),
- m_audioOnly(false),
- m_videoOnly(false),
- m_startPix(QPixmap()),
- m_endPix(QPixmap()),
- m_hasThumbs(false),
- m_selectedEffect(-1),
- m_timeLine(0),
- m_startThumbRequested(false),
- m_endThumbRequested(false),
- //m_hover(false),
- m_speed(speed),
- m_strobe(strobe),
- m_framePixelWidth(0),
- m_limitedKeyFrames(false)
+ClipItem::ClipItem(DocClipBase *clip, const ItemInfo& info, double fps, double speed, int strobe, int frame_width, bool generateThumbs) :
+ AbstractClipItem(info, QRectF(), fps),
+ m_clip(clip),
+ m_startFade(0),
+ m_endFade(0),
+ m_audioOnly(false),
+ m_videoOnly(false),
+ m_startPix(QPixmap()),
+ m_endPix(QPixmap()),
+ m_hasThumbs(false),
+ m_selectedEffect(-1),
+ m_timeLine(0),
+ m_startThumbRequested(false),
+ m_endThumbRequested(false),
+ //m_hover(false),
+ m_speed(speed),
+ m_strobe(strobe),
+ m_framePixelWidth(0),
+ m_limitedKeyFrames(false)
{
setZValue(2);
m_effectList = EffectsList(true);
setAcceptDrops(true);
m_audioThumbReady = m_clip->audioThumbCreated();
//setAcceptsHoverEvents(true);
- connect(this , SIGNAL(prepareAudioThumb(double, int, int, int)) , this, SLOT(slotPrepareAudioThumb(double, int, int, int)));
+ connect(this , SIGNAL(prepareAudioThumb(double,int,int,int,int)) , this, SLOT(slotPrepareAudioThumb(double,int,int,int,int)));
- if (m_clipType == VIDEO || m_clipType == AV || m_clipType == SLIDESHOW || m_clipType == PLAYLIST) {
+ if (m_clipType == Video || m_clipType == AV || m_clipType == SlideShow || m_clipType == Playlist) {
m_baseColor = QColor(141, 166, 215);
if (!m_clip->isPlaceHolder()) {
m_hasThumbs = true;
connect(&m_startThumbTimer, SIGNAL(timeout()), this, SLOT(slotGetStartThumb()));
m_endThumbTimer.setSingleShot(true);
connect(&m_endThumbTimer, SIGNAL(timeout()), this, SLOT(slotGetEndThumb()));
- connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QImage)), this, SLOT(slotThumbReady(int, QImage)));
+ connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int,QImage)), this, SLOT(slotThumbReady(int,QImage)));
connect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
if (generateThumbs) QTimer::singleShot(200, this, SLOT(slotFetchThumbs()));
}
- } else if (m_clipType == COLOR) {
+ } else if (m_clipType == Color) {
QString colour = m_clip->getProperty("colour");
colour = colour.replace(0, 2, "#");
m_baseColor = QColor(colour.left(7));
- } else if (m_clipType == IMAGE || m_clipType == TEXT) {
+ } else if (m_clipType == Image || m_clipType == Text) {
m_baseColor = QColor(141, 166, 215);
- if (m_clipType == TEXT) {
- connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QImage)), this, SLOT(slotThumbReady(int, QImage)));
+ if (m_clipType == Text) {
+ connect(m_clip->thumbProducer(), SIGNAL(thumbReady(int,QImage)), this, SLOT(slotThumbReady(int,QImage)));
}
//m_startPix = KThumb::getImage(KUrl(clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
- } else if (m_clipType == AUDIO) {
+ } else if (m_clipType == Audio) {
m_baseColor = QColor(141, 215, 166);
connect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
}
+ m_paintColor = m_baseColor;
}
blockSignals(true);
m_endThumbTimer.stop();
m_startThumbTimer.stop();
- if (scene()) scene()->removeItem(this);
- if (m_clipType == VIDEO || m_clipType == AV || m_clipType == SLIDESHOW || m_clipType == PLAYLIST) {
- //disconnect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QImage)), this, SLOT(slotThumbReady(int, QImage)));
+ if (scene())
+ scene()->removeItem(this);
+ if (m_clipType == Video || m_clipType == AV || m_clipType == SlideShow || m_clipType == Playlist) {
+ //disconnect(m_clip->thumbProducer(), SIGNAL(thumbReady(int,QImage)), this, SLOT(slotThumbReady(int,QImage)));
//disconnect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
}
delete m_timeLine;
}
-ClipItem *ClipItem::clone(ItemInfo info) const
+ClipItem *ClipItem::clone(const ItemInfo &info) const
{
ClipItem *duplicate = new ClipItem(m_clip, info, m_fps, m_speed, m_strobe, FRAME_SIZE);
- if (m_clipType == IMAGE || m_clipType == TEXT) duplicate->slotSetStartThumb(m_startPix);
- else if (m_clipType != COLOR) {
+ if (m_clipType == Image || m_clipType == Text) duplicate->slotSetStartThumb(m_startPix);
+ else if (m_clipType != Color) {
if (info.cropStart == m_info.cropStart) duplicate->slotSetStartThumb(m_startPix);
if (info.cropStart + (info.endPos - info.startPos) == m_info.cropStart + m_info.cropDuration) {
duplicate->slotSetEndThumb(m_endPix);
return duplicate;
}
-void ClipItem::setEffectList(const EffectsList effectList)
+void ClipItem::setEffectList(const EffectsList &effectList)
{
m_effectList.clone(effectList);
m_effectNames = m_effectList.effectNames().join(" / ");
if (!m_effectList.isEmpty()) {
- for (int i = 0; i < m_effectList.count(); i++) {
- QDomElement effect = m_effectList.at(i);
+ for (int i = 0; i < m_effectList.count(); ++i) {
+ QDomElement effect = m_effectList.at(i);
QString effectId = effect.attribute("id");
// check if it is a fade effect
QDomNodeList params = effect.elementsByTagName("parameter");
else if (fade < 0)
m_endFade = -fade;
}
- setSelectedEffect(0);
+ setSelectedEffect(1);
}
}
// Init parameter value & keyframes if required
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (e.isNull())
}
if (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") {
- if (e.attribute("keyframes").isEmpty()) {
- // Effect has a keyframe type parameter, we need to set the values
- e.setAttribute("keyframes", QString::number(cropStart().frames(m_fps)) + ':' + e.attribute("default"));
- }
- else if (offset != 0) {
- // adjust keyframes to this clip
- QString adjusted = adjustKeyframes(e.attribute("keyframes"), offset - cropStart().frames(m_fps));
- e.setAttribute("keyframes", adjusted);
- }
+ if (e.attribute("keyframes").isEmpty()) {
+ // Effect has a keyframe type parameter, we need to set the values
+ e.setAttribute("keyframes", QString::number((int) cropStart().frames(m_fps)) + ':' + e.attribute("default"));
+ }
+ else if (offset != 0) {
+ // adjust keyframes to this clip
+ QString adjusted = adjustKeyframes(e.attribute("keyframes"), offset - cropStart().frames(m_fps));
+ e.setAttribute("keyframes", adjusted);
+ }
}
if (e.attribute("type") == "geometry" && !e.hasAttribute("fixed")) {
// Effects with a geometry parameter need to sync in / out with parent clip
- effect.setAttribute("in", QString::number(cropStart().frames(m_fps)));
- effect.setAttribute("out", QString::number((cropStart() + cropDuration()).frames(m_fps) - 1));
- effect.setAttribute("_sync_in_out", "1");
- }
+ effect.setAttribute("in", QString::number((int) cropStart().frames(m_fps)));
+ effect.setAttribute("out", QString::number((int) (cropStart() + cropDuration()).frames(m_fps) - 1));
+ effect.setAttribute("_sync_in_out", "1");
+ }
}
if (effect.attribute("tag") == "volume" || effect.attribute("tag") == "brightness") {
if (effect.attribute("id") == "fadeout" || effect.attribute("id") == "fade_to_black") {
if (effect.attribute("id") == "fadein") {
if (m_effectList.hasEffect(QString(), "fade_from_black") == -1) {
int effectDuration = EffectsList::parameter(effect, "out").toInt();
- if (offset != 0) effectDuration -= offset;
+ if (offset != 0) effectDuration -= offset;
if (effectDuration > cropDuration().frames(m_fps)) {
effectDuration = cropDuration().frames(m_fps) / 2;
}
} else if (effect.attribute("id") == "fade_from_black") {
if (m_effectList.hasEffect(QString(), "fadein") == -1) {
int effectDuration = EffectsList::parameter(effect, "out").toInt();
- if (offset != 0) effectDuration -= offset;
+ if (offset != 0) effectDuration -= offset;
if (effectDuration > cropDuration().frames(m_fps)) {
effectDuration = cropDuration().frames(m_fps) / 2;
}
}
}
-const QString ClipItem::adjustKeyframes(QString keyframes, int offset)
+const QString ClipItem::adjustKeyframes(const QString &keyframes, int offset)
{
QStringList result;
// Simple keyframes
- const QStringList list = keyframes.split(';', QString::SkipEmptyParts);
+ const QStringList list = keyframes.split(QLatin1Char(';'), QString::SkipEmptyParts);
foreach(const QString &keyframe, list) {
- int pos = keyframe.section(':', 0, 0).toInt() - offset;
- QString newKey = QString::number(pos) + ":" + keyframe.section(':', 1);
- result.append(newKey);
+ const int pos = keyframe.section(':', 0, 0).toInt() - offset;
+ const QString newKey = QString::number(pos) + ":" + keyframe.section(':', 1);
+ result.append(newKey);
}
return result.join(";");
}
-bool ClipItem::checkKeyFrames()
+bool ClipItem::checkKeyFrames(int width, int height, int previousDuration, int cutPos)
{
bool clipEffectsModified = false;
QLocale locale;
// go through all effects this clip has
for (int ix = 0; ix < m_effectList.count(); ++ix) {
+ // Check geometry params
+ resizeGeometries(ix, width, height, previousDuration, cutPos == -1 ? 0 : cutPos, cropDuration().frames(m_fps) - 1);
+
+ // Check keyframe params
QStringList keyframeParams = keyframes(ix);
QStringList newKeyFrameParams;
bool effModified = false;
int lastPos = -1;
double lastValue = -1;
int start = cropStart().frames(m_fps);
- int end = (cropStart() + cropDuration()).frames(m_fps);
+ int end = (cropStart() + cropDuration()).frames(m_fps) - 1;
// go through all keyframes for one param
foreach(const QString &str, keyframes) {
if (pos > start) {
int diff = pos - lastPos;
double ratio = (double)(start - lastPos) / diff;
- double newValue = lastValue + (val - lastValue) * ratio;
- newKeyFrames.append(QString::number(start) + ':' + locale.toString(newValue));
+ int newValue = lastValue + (val - lastValue) * ratio;
+ newKeyFrames.append(QString::number(start) + ':' + QString::number(newValue));
modified = true;
}
cutKeyFrame = false;
int diff = pos - lastPos;
if (diff != 0) {
double ratio = (double)(end - lastPos) / diff;
- double newValue = lastValue + (val - lastValue) * ratio;
- newKeyFrames.append(QString::number(end) + ':' + locale.toString(newValue));
+ int newValue = lastValue + (val - lastValue) * ratio;
+ newKeyFrames.append(QString::number(end) + ':' + QString::number(newValue));
modified = true;
}
break;
} else {
- newKeyFrames.append(QString::number(pos) + ':' + locale.toString(val));
+ newKeyFrames.append(QString::number(pos) + ':' + QString::number(val));
}
}
lastPos = pos;
return clipEffectsModified;
}
-void ClipItem::setKeyframes(const int ix, const QStringList keyframes)
+void ClipItem::setKeyframes(const int ix, const QStringList &keyframes)
{
QDomElement effect = m_effectList.at(ix);
if (effect.attribute("disable") == "1") return;
QLocale locale;
QDomNodeList params = effect.elementsByTagName("parameter");
int keyframeParams = 0;
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
- if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && e.attribute("intimeline") == "1") {
+ if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && (!e.hasAttribute("intimeline") || e.attribute("intimeline") == "1")) {
e.setAttribute("keyframes", keyframes.at(keyframeParams));
- if (ix == m_selectedEffect && keyframeParams == 0) {
+ if (ix + 1 == m_selectedEffect && keyframeParams == 0) {
m_keyframes.clear();
m_visibleParam = i;
double max = locale.toDouble(e.attribute("max"));
QDomElement effect = effectAtIndex(m_selectedEffect);
if (!effect.isNull() && effect.attribute("disable") != "1") {
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
- if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && e.attribute("intimeline") == "1") {
+ if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe") && (!e.hasAttribute("intimeline") || e.attribute("intimeline") == "1")) {
m_keyframes.clear();
m_limitedKeyFrames = e.attribute("type") == "keyframe";
m_visibleParam = i;
}
}
+void ClipItem::resizeGeometries(const int index, int width, int height, int previousDuration, int start, int duration)
+{
+ QString geom;
+ QDomElement effect = m_effectList.at(index);
+ QDomNodeList params = effect.elementsByTagName("parameter");
+
+ for (int i = 0; i < params.count(); ++i) {
+ QDomElement e = params.item(i).toElement();
+ if (!e.isNull() && e.attribute("type") == "geometry") {
+ geom = e.attribute("value");
+ Mlt::Geometry geometry(geom.toUtf8().data(), previousDuration, width, height);
+ e.setAttribute("value", geometry.serialise(start, start + duration));
+ }
+ }
+}
+
QStringList ClipItem::keyframes(const int index)
{
QStringList result;
QDomElement effect = m_effectList.at(index);
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe"))
result.append(e.attribute("keyframes"));
/** @return the currently selected effect, or a null element when no effect is selected. */
QDomElement ClipItem::selectedEffect()
{
    const bool nothingSelected = (m_selectedEffect == -1 || m_effectList.isEmpty());
    if (nothingSelected)
        return QDomElement();
    return effectAtIndex(m_selectedEffect);
}
{
if (checkDuration && (m_maxDuration != m_clip->maxDuration())) {
m_maxDuration = m_clip->maxDuration();
- if (m_clipType != IMAGE && m_clipType != TEXT && m_clipType != COLOR) {
+ if (m_clipType != Image && m_clipType != Text && m_clipType != Color) {
if (m_maxDuration != GenTime() && m_info.cropStart + m_info.cropDuration > m_maxDuration) {
// Clip duration changed, make sure to stay in correct range
if (m_info.cropStart > m_maxDuration) {
}
}
}
- if (m_clipType == COLOR) {
+ if (m_clipType == Color) {
QString colour = m_clip->getProperty("colour");
colour = colour.replace(0, 2, "#");
m_baseColor = QColor(colour.left(7));
void ClipItem::slotFetchThumbs()
{
- if (scene() == NULL || m_clipType == AUDIO || m_clipType == COLOR) return;
- if (m_clipType == IMAGE) {
+ if (scene() == NULL || m_clipType == Audio || m_clipType == Color) return;
+ if (m_clipType == Image) {
if (m_startPix.isNull()) {
m_startPix = KThumb::getImage(KUrl(m_clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
update();
return;
}
- if (m_clipType == TEXT) {
+ if (m_clipType == Text) {
if (m_startPix.isNull()) slotGetStartThumb();
return;
}
}
-void ClipItem::slotSetStartThumb(QImage img)
+void ClipItem::slotSetStartThumb(const QImage &img)
{
if (!img.isNull() && img.format() == QImage::Format_ARGB32) {
QPixmap pix = QPixmap::fromImage(img);
}
}
-void ClipItem::slotSetEndThumb(QImage img)
+void ClipItem::slotSetEndThumb(const QImage &img)
{
if (!img.isNull() && img.format() == QImage::Format_ARGB32) {
QPixmap pix = QPixmap::fromImage(img);
}
}
-void ClipItem::slotThumbReady(int frame, QImage img)
+void ClipItem::slotThumbReady(int frame, const QImage &img)
{
if (scene() == NULL) return;
QRectF r = boundingRect();
m_startPix = pix;
m_startThumbRequested = false;
update(r.left(), r.top(), width, pix.height());
- if (m_clipType == IMAGE || m_clipType == TEXT) {
+ if (m_clipType == Image || m_clipType == Text) {
update(r.right() - width, r.top(), width, pix.height());
}
} else if (m_endThumbRequested && frame == (m_speedIndependantInfo.cropStart + m_speedIndependantInfo.cropDuration).frames(m_fps) - 1) {
}
}
-void ClipItem::slotSetStartThumb(const QPixmap pix)
+void ClipItem::slotSetStartThumb(const QPixmap &pix)
{
m_startPix = pix;
}
-void ClipItem::slotSetEndThumb(const QPixmap pix)
+void ClipItem::slotSetEndThumb(const QPixmap &pix)
{
m_endPix = pix;
}
int ClipItem::type() const
{
- return AVWIDGET;
+ return AVWidget;
}
DocClipBase *ClipItem::baseClip() const
QWidget *)
{
QPalette palette = scene()->palette();
- QColor paintColor;
+ QColor paintColor = m_paintColor;
QColor textColor;
QColor textBgColor;
QPen framePen;
- if (parentItem()) paintColor = QColor(255, 248, 149);
- else paintColor = m_baseColor;
if (isSelected() || (parentItem() && parentItem()->isSelected())) {
- textColor = palette.highlightedText().color();
- textBgColor = palette.highlight().color();
- paintColor = paintColor.darker();
+ textColor = palette.highlightedText().color();
+ textBgColor = palette.highlight().color();
framePen.setColor(textBgColor);
+ paintColor.setRed(qMin(paintColor.red() * 2, 255));
}
else {
- textColor = palette.text().color();
- textBgColor = palette.window().color();
- textBgColor.setAlpha(200);
- framePen.setColor(paintColor.darker());
+ textColor = palette.text().color();
+ textBgColor = palette.window().color();
+ textBgColor.setAlpha(200);
+ framePen.setColor(m_paintColor.darker());
}
const QRectF exposed = option->exposedRect;
- const QRectF mappedExposed = painter->worldTransform().mapRect(exposed);
- const QRectF mapped = painter->worldTransform().mapRect(rect());
- painter->setWorldMatrixEnabled(false);
+ const QTransform transformation = painter->worldTransform();
+ const QRectF mappedExposed = transformation.mapRect(exposed);
+ const QRectF mapped = transformation.mapRect(rect());
+ painter->setWorldTransform(QTransform());
QPainterPath p;
p.addRect(mappedExposed);
QPainterPath q;
q.addRoundedRect(mapped, 3, 3);
+ painter->setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform, false);
painter->setClipPath(p.intersected(q));
painter->setPen(Qt::NoPen);
painter->fillRect(mappedExposed, paintColor);
- painter->setPen(paintColor.darker());
+ painter->setPen(m_paintColor.darker());
// draw thumbnails
if (KdenliveSettings::videothumbnails() && !isAudioOnly()) {
- if ((m_clipType == IMAGE || m_clipType == TEXT) && !m_startPix.isNull()) {
- const QPointF top = mapped.topRight() - QPointF(m_startPix.width() - 1, 0);
- painter->drawPixmap(top, m_startPix);
- QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
- painter->drawLine(l2);
+ QRectF thumbRect;
+ if ((m_clipType == Image || m_clipType == Text) && !m_startPix.isNull()) {
+ if (thumbRect.isNull()) thumbRect = QRectF(0, 0, mapped.height() / m_startPix.height() * m_startPix.width(), mapped.height());
+ thumbRect.moveTopRight(mapped.topRight());
+ painter->drawPixmap(thumbRect, m_startPix, m_startPix.rect());
+ //const QPointF top = mapped.topRight() - QPointF(m_startPix.width() - 1, 0);
+ //painter->drawPixmap(top, m_startPix);
+ //QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
+ //painter->drawLine(l2);
} else if (!m_endPix.isNull()) {
- const QPointF top = mapped.topRight() - QPointF(m_endPix.width() - 1, 0);
- painter->drawPixmap(top, m_endPix);
- QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
- painter->drawLine(l2);
+ if (thumbRect.isNull()) thumbRect = QRectF(0, 0, mapped.height() / m_endPix.height() * m_endPix.width(), mapped.height());
+ thumbRect.moveTopRight(mapped.topRight());
+ painter->drawPixmap(thumbRect, m_endPix, m_endPix.rect());
+ //const QPointF top = mapped.topRight() - QPointF(m_endPix.width() - 1, 0);
+ //painter->drawPixmap(top, m_endPix);
+ //QLineF l2(top.x(), mapped.top(), top.x(), mapped.bottom());
+ //painter->drawLine(l2);
}
if (!m_startPix.isNull()) {
- painter->drawPixmap(mapped.topLeft(), m_startPix);
- QLineF l2(mapped.left() + m_startPix.width(), mapped.top(), mapped.left() + m_startPix.width(), mapped.bottom());
- painter->drawLine(l2);
+ if (thumbRect.isNull()) thumbRect = QRectF(0, 0, mapped.height() / m_startPix.height() * m_startPix.width(), mapped.height());
+ thumbRect.moveTopLeft(mapped.topLeft());
+ painter->drawPixmap(thumbRect, m_startPix, m_startPix.rect());
+ //painter->drawPixmap(mapped.topLeft(), m_startPix);
+ //QLineF l2(mapped.left() + m_startPix.width(), mapped.top(), mapped.left() + m_startPix.width(), mapped.bottom());
+ //painter->drawLine(l2);
}
// if we are in full zoom, paint thumbnail for every frame
- if (m_clip->thumbProducer() && clipType() != COLOR && clipType() != AUDIO && !m_audioOnly && painter->worldTransform().m11() == FRAME_SIZE) {
+ if (m_clip->thumbProducer() && clipType() != Color && clipType() != Audio && !m_audioOnly && transformation.m11() == FRAME_SIZE) {
int offset = (m_info.startPos - m_info.cropStart).frames(m_fps);
int left = qMax((int) m_info.cropStart.frames(m_fps) + 1, (int) mapToScene(exposed.left(), 0).x() - offset);
int right = qMin((int)(m_info.cropStart + m_info.cropDuration).frames(m_fps) - 1, (int) mapToScene(exposed.right(), 0).x() - offset);
QPointF startPos = mapped.topLeft();
int startOffset = m_info.cropStart.frames(m_fps);
- if (clipType() == IMAGE || clipType() == TEXT) {
- for (int i = left; i <= right; i++) {
+ if (clipType() == Image || clipType() == Text) {
+ for (int i = left; i <= right; ++i) {
painter->drawPixmap(startPos + QPointF(FRAME_SIZE *(i - startOffset), 0), m_startPix);
}
}
QPen pen(Qt::white);
pen.setStyle(Qt::DotLine);
QList <int> missing;
- for (int i = left; i <= right; i++) {
+ for (int i = left; i <= right; ++i) {
img = m_clip->thumbProducer()->findCachedThumb(path + QString::number(i));
QPointF xpos = startPos + QPointF(FRAME_SIZE *(i - startOffset), 0);
if (img.isNull()) missing << i;
- else painter->drawImage(xpos, img);
+ else {
+ painter->drawImage(xpos, img);
+ }
painter->drawLine(xpos, xpos + QPointF(0, mapped.height()));
}
if (!missing.isEmpty()) {
}
}
}
-
// draw audio thumbnails
- if (KdenliveSettings::audiothumbnails() && m_speed == 1.0 && !isVideoOnly() && ((m_clipType == AV && (exposed.bottom() > (rect().height() / 2) || isAudioOnly())) || m_clipType == AUDIO) && m_audioThumbReady) {
+ if (KdenliveSettings::audiothumbnails() && m_speed == 1.0 && !isVideoOnly() && ((m_clipType == AV && (exposed.bottom() > (rect().height() / 2) || isAudioOnly())) || m_clipType == Audio) && m_audioThumbReady) {
double startpixel = exposed.left();
if (startpixel < 0)
mappedRect.setTop(mappedRect.bottom() - mapped.height() / 2);
} else mappedRect = mapped;
- double scale = painter->worldTransform().m11();
+ double scale = transformation.m11();
int channels = 0;
if (isEnabled() && m_clip) channels = m_clip->getProperty("channels").toInt();
if (scale != m_framePixelWidth)
m_audioThumbCachePic.clear();
double cropLeft = m_info.cropStart.frames(m_fps);
const int clipStart = mappedRect.x();
- const int mappedStartPixel = painter->worldTransform().map(QPointF(startpixel + cropLeft, 0)).x() - clipStart;
- const int mappedEndPixel = painter->worldTransform().map(QPointF(endpixel + cropLeft, 0)).x() - clipStart;
+ const int mappedStartPixel = transformation.map(QPointF(startpixel + cropLeft, 0)).x() - clipStart;
+ const int mappedEndPixel = transformation.map(QPointF(endpixel + cropLeft, 0)).x() - clipStart;
cropLeft = cropLeft * scale;
if (channels >= 1) {
- emit prepareAudioThumb(scale, mappedStartPixel, mappedEndPixel, channels);
+ emit prepareAudioThumb(scale, mappedStartPixel, mappedEndPixel, channels, (int) (mappedRect.height() + 0.5));
}
-
+ QRectF pixmapRect(0, mappedRect.y(), 100, mappedRect.height());
for (int startCache = mappedStartPixel - (mappedStartPixel) % 100; startCache < mappedEndPixel; startCache += 100) {
- if (!m_audioThumbCachePic.value(startCache).isNull())
- painter->drawPixmap(clipStart + startCache - cropLeft, mappedRect.y(), m_audioThumbCachePic.value(startCache));
+ if (!m_audioThumbCachePic.value(startCache).isNull()) {
+ //painter->drawPixmap(clipStart + startCache - cropLeft, mappedRect.y(), m_audioThumbCachePic.value(startCache));
+ QPixmap pix(m_audioThumbCachePic.value(startCache));
+ pixmapRect.moveLeft(clipStart + startCache - cropLeft);
+ painter->drawPixmap(pixmapRect, pix, pix.rect());
+ }
}
}
if (m_isMainSelectedClip) {
- framePen.setColor(Qt::red);
- textBgColor = Qt::red;
+ framePen.setColor(Qt::red);
+ textBgColor = Qt::red;
}
// only paint details if clip is big enough
if (!m_effectNames.isEmpty() && mapped.width() > 40) {
QRectF txtBounding = painter->boundingRect(mapped, Qt::AlignLeft | Qt::AlignTop, m_effectNames);
QColor bColor = palette.window().color();
- QColor tColor = palette.text().color();
- tColor.setAlpha(220);
+ QColor tColor = palette.text().color();
+ tColor.setAlpha(220);
if (m_timeLine && m_timeLine->state() == QTimeLine::Running) {
qreal value = m_timeLine->currentValue();
txtBounding.setWidth(txtBounding.width() * value);
bColor.setAlpha(100 + 50 * value);
};
- painter->setBrush(bColor);
- painter->setPen(Qt::NoPen);
- painter->drawRoundedRect(txtBounding.adjusted(-1, -2, 4, -1), 3, 3);
+ painter->setBrush(bColor);
+ painter->setPen(Qt::NoPen);
+ painter->drawRoundedRect(txtBounding.adjusted(-1, -2, 4, -1), 3, 3);
painter->setPen(tColor);
painter->drawText(txtBounding.adjusted(2, 0, 1, -1), Qt::AlignCenter, m_effectNames);
}
// Draw clip name
const QRectF txtBounding2 = painter->boundingRect(mapped, Qt::AlignRight | Qt::AlignTop, m_clipName + ' ').adjusted(0, -1, 0, -1);
- painter->setPen(Qt::NoPen);
+ painter->setPen(Qt::NoPen);
painter->fillRect(txtBounding2.adjusted(-3, 0, 0, 0), textBgColor);
painter->setBrush(QBrush(Qt::NoBrush));
- painter->setPen(textColor);
+ painter->setPen(textColor);
if (m_videoOnly) {
painter->drawPixmap(txtBounding2.topLeft() - QPointF(17, -1), m_videoPix);
} else if (m_audioOnly) {
QList < CommentedTime >::Iterator it = markers.begin();
GenTime pos;
double framepos;
- QBrush markerBrush(QColor(120, 120, 0, 140));
+ QBrush markerBrush(QColor(120, 120, 0, 140));
QPen pen = painter->pen();
for (; it != markers.end(); ++it) {
if (pos > GenTime()) {
if (pos > cropDuration()) break;
QLineF l(rect().x() + pos.frames(m_fps), rect().y(), rect().x() + pos.frames(m_fps), rect().bottom());
- QLineF l2 = painter->worldTransform().map(l);
- pen.setColor(CommentedTime::markerColor((*it).markerType()));
- pen.setStyle(Qt::DotLine);
+ QLineF l2 = transformation.map(l);
+ pen.setColor(CommentedTime::markerColor((*it).markerType()));
+ pen.setStyle(Qt::DotLine);
painter->setPen(pen);
painter->drawLine(l2);
if (KdenliveSettings::showmarkers()) {
framepos = rect().x() + pos.frames(m_fps);
const QRectF r1(framepos + 0.04, rect().height()/3, rect().width() - framepos - 2, rect().height() / 2);
- const QRectF r2 = painter->worldTransform().mapRect(r1);
+ const QRectF r2 = transformation.mapRect(r1);
const QRectF txtBounding3 = painter->boundingRect(r2, Qt::AlignLeft | Qt::AlignTop, ' ' + (*it).comment() + ' ');
painter->setBrush(markerBrush);
- pen.setStyle(Qt::SolidLine);
+ pen.setStyle(Qt::SolidLine);
painter->setPen(pen);
painter->drawRect(txtBounding3);
painter->setBrush(Qt::NoBrush);
fadeInPath.lineTo(0, rect().height());
fadeInPath.lineTo(m_startFade, 0);
fadeInPath.closeSubpath();
- QPainterPath f1 = painter->worldTransform().map(fadeInPath);
+ QPainterPath f1 = transformation.map(fadeInPath);
painter->fillPath(f1/*.intersected(resultClipPath)*/, fades);
/*if (isSelected()) {
QLineF l(m_startFade * scale, 0, 0, itemHeight);
fadeOutPath.lineTo(rect().width(), rect().height());
fadeOutPath.lineTo(rect().width() - m_endFade, 0);
fadeOutPath.closeSubpath();
- QPainterPath f1 = painter->worldTransform().map(fadeOutPath);
+ QPainterPath f1 = transformation.map(fadeOutPath);
painter->fillPath(f1/*.intersected(resultClipPath)*/, fades);
/*if (isSelected()) {
QLineF l(itemWidth - m_endFade * scale, 0, itemWidth, itemHeight);
painter->setPen(QPen(Qt::lightGray));
// draw effect or transition keyframes
- drawKeyFrames(painter, m_limitedKeyFrames);
+ drawKeyFrames(painter, transformation, m_limitedKeyFrames);
}
// draw clip border
}
-OPERATIONTYPE ClipItem::operationMode(QPointF pos)
+OperationType ClipItem::operationMode(const QPointF &pos)
{
- if (isItemLocked()) return NONE;
+ if (isItemLocked()) return None;
const double scale = projectScene()->scale().x();
double maximumOffset = 6 / scale;
if (isSelected() || (parentItem() && parentItem()->isSelected())) {
int kf = mouseOverKeyFrames(pos, maximumOffset);
if (kf != -1) {
m_editedKeyframe = kf;
- return KEYFRAME;
+ return KeyFrame;
}
}
QRectF rect = sceneBoundingRect();
int addtransitionOffset = 10;
// Don't allow add transition if track height is very small. No transitions for audio only clips
- if (rect.height() < 30 || isAudioOnly() || m_clipType == AUDIO) addtransitionOffset = 0;
+ if (rect.height() < 30 || isAudioOnly() || m_clipType == Audio) addtransitionOffset = 0;
if (qAbs((int)(pos.x() - (rect.x() + m_startFade))) < maximumOffset && qAbs((int)(pos.y() - rect.y())) < 6) {
- return FADEIN;
+ return FadeIn;
} else if ((pos.x() <= rect.x() + rect.width() / 2) && pos.x() - rect.x() < maximumOffset && (rect.bottom() - pos.y() > addtransitionOffset)) {
- return RESIZESTART;
+ // If we are in a group, allow resize only if all clips start at same position
+ if (parentItem()) {
+ QGraphicsItemGroup *dragGroup = static_cast <QGraphicsItemGroup *>(parentItem());
+ QList<QGraphicsItem *> list = dragGroup->childItems();
+ for (int i = 0; i < list.count(); ++i) {
+ if (list.at(i)->type() == AVWidget) {
+ ClipItem *c = static_cast <ClipItem*>(list.at(i));
+ if (c->startPos() != startPos()) return MoveOperation;
+ }
+ }
+ }
+ return ResizeStart;
} else if (qAbs((int)(pos.x() - (rect.x() + rect.width() - m_endFade))) < maximumOffset && qAbs((int)(pos.y() - rect.y())) < 6) {
- return FADEOUT;
+ return FadeOut;
} else if ((pos.x() >= rect.x() + rect.width() / 2) && (rect.right() - pos.x() < maximumOffset) && (rect.bottom() - pos.y() > addtransitionOffset)) {
- return RESIZEEND;
+ // If we are in a group, allow resize only if all clips end at same position
+ if (parentItem()) {
+ QGraphicsItemGroup *dragGroup = static_cast <QGraphicsItemGroup *>(parentItem());
+ QList<QGraphicsItem *> list = dragGroup->childItems();
+ for (int i = 0; i < list.count(); ++i) {
+ if (list.at(i)->type() == AVWidget) {
+ ClipItem *c = static_cast <ClipItem*>(list.at(i));
+ if (c->endPos() != endPos()) return MoveOperation;
+ }
+ }
+ }
+ return ResizeEnd;
} else if ((pos.x() - rect.x() < 16 / scale) && (rect.bottom() - pos.y() <= addtransitionOffset)) {
- return TRANSITIONSTART;
+ return TransitionStart;
} else if ((rect.right() - pos.x() < 16 / scale) && (rect.bottom() - pos.y() <= addtransitionOffset)) {
- return TRANSITIONEND;
+ return TransitionEnd;
}
- return MOVE;
+ return MoveOperation;
}
int ClipItem::itemHeight()
QList < GenTime > markers = m_clip->snapMarkers();
GenTime pos;
- for (int i = 0; i < markers.size(); i++) {
+ for (int i = 0; i < markers.size(); ++i) {
pos = GenTime((int)(markers.at(i).frames(m_fps) / qAbs(m_speed) + 0.5), m_fps) - cropStart();
if (pos > GenTime()) {
if (pos > cropDuration()) break;
QList < CommentedTime > markers = m_clip->commentedSnapMarkers();
GenTime pos;
- for (int i = 0; i < markers.size(); i++) {
+ for (int i = 0; i < markers.size(); ++i) {
pos = GenTime((int)(markers.at(i).time().frames(m_fps) / qAbs(m_speed) + 0.5), m_fps) - cropStart();
if (pos > GenTime()) {
if (pos > cropDuration()) break;
return snaps;
}
-void ClipItem::slotPrepareAudioThumb(double pixelForOneFrame, int startpixel, int endpixel, int channels)
+void ClipItem::slotPrepareAudioThumb(double pixelForOneFrame, int startpixel, int endpixel, int channels, int pixelHeight)
{
// Bail out, if caller provided invalid data
if (channels <= 0) {
- kWarning() << "Unable to draw image with " << channels << "number of channels";
+ kWarning() << "Unable to draw image with " << channels << "number of channels";
return;
}
- QRectF re = sceneBoundingRect();
- if (m_clipType == AV && !isAudioOnly()) re.setTop(re.y() + re.height() / 2);
int factor = 64;
if (KdenliveSettings::normaliseaudiothumbs()) {
- factor = m_clip->getProperty("audio_max").toInt();
+ factor = m_clip->getProperty("audio_max").toInt();
}
//kDebug() << "// PREP AUDIO THMB FRMO : scale:" << pixelForOneFrame<< ", from: " << startpixel << ", to: " << endpixel;
QPen audiopen;
audiopen.setWidth(0);
if (simplifiedAudio) channels = 1;
- int channelHeight = re.height() / channels;
+ int channelHeight = pixelHeight / channels;
QMap<int, QPainterPath > positiveChannelPaths;
QMap<int, QPainterPath > negativeChannelPaths;
if (m_framePixelWidth == pixelForOneFrame && m_audioThumbCachePic.contains(startCache))
continue;
if (m_audioThumbCachePic.value(startCache).isNull() || m_framePixelWidth != pixelForOneFrame) {
- QPixmap pix(100, (int)(re.height()));
- pix.fill(QColor(180, 180, 180, 150));
- m_audioThumbCachePic[startCache] = pix;
+ QPixmap pix(100, pixelHeight);
+ pix.fill(QColor(180, 180, 180, 150));
+ m_audioThumbCachePic[startCache] = pix;
}
positiveChannelPaths.clear();
negativeChannelPaths.clear();
QPainter pixpainter(&m_audioThumbCachePic[startCache]);
- for (int i = 0; i < channels; i++) {
- if (simplifiedAudio) {
- positiveChannelPaths[i].moveTo(-1, channelHeight);
- }
- else if (fullAreaDraw) {
- positiveChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
- negativeChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
- }
- else {
- positiveChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
- audiopen.setColor(QColor(60, 60, 60, 50));
- pixpainter.setPen(audiopen);
- pixpainter.drawLine(0, channelHeight*i + channelHeight / 2, 100, channelHeight*i + channelHeight / 2);
- }
- }
+ for (int i = 0; i < channels; ++i) {
+ if (simplifiedAudio) {
+ positiveChannelPaths[i].moveTo(-1, channelHeight);
+ }
+ else if (fullAreaDraw) {
+ positiveChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
+ negativeChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
+ }
+ else {
+ positiveChannelPaths[i].moveTo(-1, channelHeight*i + channelHeight / 2);
+ audiopen.setColor(QColor(60, 60, 60, 50));
+ pixpainter.setPen(audiopen);
+ pixpainter.drawLine(0, channelHeight*i + channelHeight / 2, 100, channelHeight*i + channelHeight / 2);
+ }
+ }
for (int samples = 0; samples <= 100; samples++) {
double frame = (double)(samples + startCache - 0) / pixelForOneFrame;
const QMap<int, QByteArray> frame_channel_data = baseClip()->audioFrameCache.value((int)frame);
for (int channel = 0; channel < channels && !frame_channel_data.value(channel).isEmpty(); channel++) {
- int y = channelHeight * channel + channelHeight / 2;
- if (simplifiedAudio) {
- double delta = qAbs((frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / factor);
- positiveChannelPaths[channel].lineTo(samples, channelHeight - delta);
- } else if (fullAreaDraw) {
- double delta = qAbs((frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / (2 * factor));
+ int y = channelHeight * channel + channelHeight / 2;
+ if (simplifiedAudio) {
+ double delta = qAbs((frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / factor);
+ positiveChannelPaths[channel].lineTo(samples, channelHeight - delta);
+ } else if (fullAreaDraw) {
+ double delta = qAbs((frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / (2 * factor));
positiveChannelPaths[channel].lineTo(samples, y + delta);
negativeChannelPaths[channel].lineTo(samples, y - delta);
} else {
- double delta = (frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / (2 * factor);
- positiveChannelPaths[channel].lineTo(samples, y + delta);
+ double delta = (frame_channel_data.value(channel).at(sample) - 63.5) * channelHeight / (2 * factor);
+ positiveChannelPaths[channel].lineTo(samples, y + delta);
}
}
}
for (int channel = 0; channel < channels; channel++) {
- if (simplifiedAudio) {
- positiveChannelPaths[channel].lineTo(101, channelHeight);
- } else if (fullAreaDraw) {
- int y = channelHeight * channel + channelHeight / 2;
- positiveChannelPaths[channel].lineTo(101, y);
- negativeChannelPaths[channel].lineTo(101, y);
- }
- }
+ if (simplifiedAudio) {
+ positiveChannelPaths[channel].lineTo(101, channelHeight);
+ } else if (fullAreaDraw) {
+ int y = channelHeight * channel + channelHeight / 2;
+ positiveChannelPaths[channel].lineTo(101, y);
+ negativeChannelPaths[channel].lineTo(101, y);
+ }
+ }
if (fullAreaDraw || simplifiedAudio) {
- audiopen.setColor(QColor(80, 80, 80, 200));
- pixpainter.setPen(audiopen);
- pixpainter.setBrush(QBrush(QColor(120, 120, 120, 200)));
- }
- else {
- audiopen.setColor(QColor(60, 60, 60, 100));
- pixpainter.setPen(audiopen);
- pixpainter.setBrush(Qt::NoBrush);
- }
- for (int i = 0; i < channels; i++) {
+ audiopen.setColor(QColor(80, 80, 80, 200));
+ pixpainter.setPen(audiopen);
+ pixpainter.setBrush(QBrush(QColor(120, 120, 120, 200)));
+ }
+ else {
+ audiopen.setColor(QColor(60, 60, 60, 100));
+ pixpainter.setPen(audiopen);
+ pixpainter.setBrush(Qt::NoBrush);
+ }
+ pixpainter.setRenderHint(QPainter::Antialiasing, false);
+ for (int i = 0; i < channels; ++i) {
if (fullAreaDraw) {
pixpainter.drawPath(positiveChannelPaths[i].united(negativeChannelPaths.value(i)));
} else
void ClipItem::resizeStart(int posx, bool /*size*/, bool emitChange)
{
bool sizeLimit = false;
- if (clipType() != IMAGE && clipType() != COLOR && clipType() != TEXT) {
+ if (clipType() != Image && clipType() != Color && clipType() != Text) {
const int min = (startPos() - cropStart()).frames(m_fps);
if (posx < min) posx = min;
sizeLimit = true;
QVariant ClipItem::itemChange(GraphicsItemChange change, const QVariant &value)
{
if (change == QGraphicsItem::ItemSelectedChange) {
- if (value.toBool()) setZValue(10);
- else setZValue(2);
+ if (value.toBool())
+ setZValue(10);
+ else
+ setZValue(2);
}
if (change == ItemPositionChange && scene()) {
// calculate new position.
int xpos = projectScene()->getSnapPointForPos((int) newPos.x(), KdenliveSettings::snaptopoints());
xpos = qMax(xpos, 0);
newPos.setX(xpos);
- // Warning: newPos gives a position relative to the click event, so hack to get absolute pos
- int yOffset = property("y_absolute").toInt() + newPos.y();
+ // Warning: newPos gives a position relative to the click event, so hack to get absolute pos
+ int yOffset = property("y_absolute").toInt() + newPos.y();
int newTrack = yOffset / KdenliveSettings::trackheight();
newTrack = qMin(newTrack, projectScene()->tracksCount() - 1);
newTrack = qMax(newTrack, 0);
- QStringList lockedTracks = property("locked_tracks").toStringList();
- if (lockedTracks.contains(QString::number(newTrack))) {
- // Trying to move to a locked track
- return pos();
- }
+ QStringList lockedTracks = property("locked_tracks").toStringList();
+ if (lockedTracks.contains(QString::number(newTrack))) {
+ // Trying to move to a locked track
+ return pos();
+ }
newPos.setY((int)(newTrack * KdenliveSettings::trackheight() + 1));
// Only one clip is moving
QRectF sceneShape = rect();
bool forwardMove = newPos.x() > pos().x();
int offset = 0;
if (!items.isEmpty()) {
- for (int i = 0; i < items.count(); i++) {
+ for (int i = 0; i < items.count(); ++i) {
if (!items.at(i)->isEnabled()) continue;
if (items.at(i)->type() == type()) {
// Collision!
//kDebug()<<"// ITEM NEW POS: "<<newPos.x()<<", mapped: "<<mapToScene(newPos.x(), 0).x();
return newPos;
}
+ if (change == ItemParentChange) {
+ QGraphicsItem* parent = value.value<QGraphicsItem*>();
+ if (parent) m_paintColor = m_baseColor.lighter(135);
+ else m_paintColor = m_baseColor;
+ }
return QGraphicsItem::itemChange(change, value);
}
int ix;
QDomElement insertedEffect;
if (!effect.hasAttribute("kdenlive_ix")) {
- // effect dropped from effect list
+ // effect dropped from effect list
ix = effectsCounter();
} else ix = effect.attribute("kdenlive_ix").toInt();
if (!m_effectList.isEmpty() && ix <= m_effectList.count()) {
int effectOut;
if (effect.attribute("tag") == "affine") {
- // special case: the affine effect needs in / out points
- effectIn = effect.attribute("in").toInt();
- effectOut = effect.attribute("out").toInt();
+ // special case: the affine effect needs in / out points
+ effectIn = effect.attribute("in").toInt();
+ effectOut = effect.attribute("out").toInt();
}
else {
- effectIn = EffectsList::parameter(effect, "in").toInt();
- effectOut = EffectsList::parameter(effect, "out").toInt();
+ effectIn = EffectsList::parameter(effect, "in").toInt();
+ effectOut = EffectsList::parameter(effect, "out").toInt();
}
EffectsParameterList parameters;
// check if it is a fade effect
if (effectId == "fadein") {
- needRepaint = true;
+ needRepaint = true;
if (m_effectList.hasEffect(QString(), "fade_from_black") == -1) {
- fade = effectOut - effectIn;
+ fade = effectOut - effectIn;
}/* else {
- QDomElement fadein = m_effectList.getEffectByTag(QString(), "fade_from_black");
+ QDomElement fadein = m_effectList.getEffectByTag(QString(), "fade_from_black");
if (fadein.attribute("name") == "out") fade += fadein.attribute("value").toInt();
else if (fadein.attribute("name") == "in") fade -= fadein.attribute("value").toInt();
}*/
} else if (effectId == "fade_from_black") {
- kDebug()<<"// FOUND FTB:"<<effectOut<<" - "<<effectIn;
- needRepaint = true;
+ needRepaint = true;
if (m_effectList.hasEffect(QString(), "fadein") == -1) {
- fade = effectOut - effectIn;
+ fade = effectOut - effectIn;
}/* else {
- QDomElement fadein = m_effectList.getEffectByTag(QString(), "fadein");
+ QDomElement fadein = m_effectList.getEffectByTag(QString(), "fadein");
if (fadein.attribute("name") == "out") fade += fadein.attribute("value").toInt();
else if (fadein.attribute("name") == "in") fade -= fadein.attribute("value").toInt();
}*/
- } else if (effectId == "fadeout") {
- needRepaint = true;
+ } else if (effectId == "fadeout") {
+ needRepaint = true;
if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
- fade = effectIn - effectOut;
+ fade = effectIn - effectOut;
} /*else {
- QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fade_to_black");
+ QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fade_to_black");
if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
}*/
} else if (effectId == "fade_to_black") {
- needRepaint = true;
+ needRepaint = true;
if (m_effectList.hasEffect(QString(), "fadeout") == -1) {
- fade = effectIn - effectOut;
+ fade = effectIn - effectOut;
}/* else {
- QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fadeout");
+ QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fadeout");
if (fadeout.attribute("name") == "out") fade -= fadeout.attribute("value").toInt();
else if (fadeout.attribute("name") == "in") fade += fadeout.attribute("value").toInt();
}*/
}
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull()) {
if (e.attribute("type") == "geometry" && !e.hasAttribute("fixed")) {
}
}
if (needInOutSync) {
- parameters.addParam("in", QString::number(cropStart().frames(m_fps)));
- parameters.addParam("out", QString::number((cropStart() + cropDuration()).frames(m_fps) - 1));
+ parameters.addParam("in", QString::number((int) cropStart().frames(m_fps)));
+ parameters.addParam("out", QString::number((int) (cropStart() + cropDuration()).frames(m_fps) - 1));
parameters.addParam("_sync_in_out", "1");
}
m_effectNames = m_effectList.effectNames().join(" / ");
else if (fade < 0) m_endFade = -fade;
if (m_selectedEffect == -1) {
- setSelectedEffect(0);
+ setSelectedEffect(1);
} else if (m_selectedEffect == ix - 1) setSelectedEffect(m_selectedEffect);
if (needRepaint) update(boundingRect());
/*if (animate) {
return parameters;
}
-void ClipItem::deleteEffect(QString index)
+void ClipItem::deleteEffect(const QString &index)
{
bool needRepaint = false;
int ix = index.toInt();
QDomElement effect = m_effectList.itemFromIndex(ix);
QString effectId = effect.attribute("id");
if ((effectId == "fadein" && hasEffect(QString(), "fade_from_black") == -1) ||
- (effectId == "fade_from_black" && hasEffect(QString(), "fadein") == -1)) {
+ (effectId == "fade_from_black" && hasEffect(QString(), "fadein") == -1)) {
m_startFade = 0;
needRepaint = true;
} else if ((effectId == "fadeout" && hasEffect(QString(), "fade_to_black") == -1) ||
- (effectId == "fade_to_black" && hasEffect(QString(), "fadeout") == -1)) {
+ (effectId == "fade_to_black" && hasEffect(QString(), "fadeout") == -1)) {
m_endFade = 0;
needRepaint = true;
} else if (EffectsList::hasKeyFrames(effect)) needRepaint = true;
setSelectedEffect(m_effectList.count());
} else setSelectedEffect(ix);
}
- if (needRepaint) update(boundingRect());
- else {
+ if (needRepaint) {
+ update(boundingRect());
+ } else {
QRectF r = boundingRect();
r.setHeight(20);
update(r);
int ClipItem::nextFreeEffectGroupIndex() const
{
int freeGroupIndex = 0;
- for (int i = 0; i < m_effectList.count(); i++) {
+ for (int i = 0; i < m_effectList.count(); ++i) {
QDomElement effect = m_effectList.at(i);
- EffectInfo effectInfo;
- effectInfo.fromString(effect.attribute("kdenlive_info"));
- if (effectInfo.groupIndex >= freeGroupIndex) {
- freeGroupIndex = effectInfo.groupIndex + 1;
- }
+ EffectInfo effectInfo;
+ effectInfo.fromString(effect.attribute("kdenlive_info"));
+ if (effectInfo.groupIndex >= freeGroupIndex) {
+ freeGroupIndex = effectInfo.groupIndex + 1;
+ }
}
return freeGroupIndex;
}
void ClipItem::dropEvent(QGraphicsSceneDragDropEvent * event)
{
if (event->proposedAction() == Qt::CopyAction && scene() && !scene()->views().isEmpty()) {
- const QString effects = QString::fromUtf8(event->mimeData()->data("kdenlive/effectslist"));
- event->acceptProposedAction();
- QDomDocument doc;
- doc.setContent(effects, true);
- QDomElement e = doc.documentElement();
- if (e.tagName() == "effectgroup") {
- // dropped an effect group
- QDomNodeList effectlist = e.elementsByTagName("effect");
- int freeGroupIndex = nextFreeEffectGroupIndex();
- EffectInfo effectInfo;
- for (int i = 0; i < effectlist.count(); i++) {
- QDomElement effect = effectlist.at(i).toElement();
- effectInfo.fromString(effect.attribute("kdenlive_info"));
- effectInfo.groupIndex = freeGroupIndex;
- effect.setAttribute("kdenlive_info", effectInfo.toString());
- effect.removeAttribute("kdenlive_ix");
- }
- } else {
- // single effect dropped
- e.removeAttribute("kdenlive_ix");
- }
- CustomTrackView *view = (CustomTrackView *) scene()->views()[0];
- if (view) view->slotAddEffect(e, m_info.startPos, track());
+ const QString effects = QString::fromUtf8(event->mimeData()->data("kdenlive/effectslist"));
+ event->acceptProposedAction();
+ QDomDocument doc;
+ doc.setContent(effects, true);
+ QDomElement e = doc.documentElement();
+ if (e.tagName() == "effectgroup") {
+ // dropped an effect group
+ QDomNodeList effectlist = e.elementsByTagName("effect");
+ int freeGroupIndex = nextFreeEffectGroupIndex();
+ EffectInfo effectInfo;
+ for (int i = 0; i < effectlist.count(); ++i) {
+ QDomElement effect = effectlist.at(i).toElement();
+ effectInfo.fromString(effect.attribute("kdenlive_info"));
+ effectInfo.groupIndex = freeGroupIndex;
+ effect.setAttribute("kdenlive_info", effectInfo.toString());
+ effect.removeAttribute("kdenlive_ix");
+ }
+ } else {
+ // single effect dropped
+ e.removeAttribute("kdenlive_ix");
+ }
+ CustomTrackView *view = (CustomTrackView *) scene()->views().first();
+ if (view) view->slotDropEffect(this, e, m_info.startPos, track());
}
else return;
}
{
if (isItemLocked()) event->setAccepted(false);
else if (event->mimeData()->hasFormat("kdenlive/effectslist")) {
- event->acceptProposedAction();
+ event->acceptProposedAction();
} else event->setAccepted(false);
}
m_audioOnly = force;
if (m_audioOnly) m_baseColor = QColor(141, 215, 166);
else {
- if (m_clipType == COLOR) {
+ if (m_clipType == Color) {
QString colour = m_clip->getProperty("colour");
colour = colour.replace(0, 2, "#");
m_baseColor = QColor(colour.left(7));
- } else if (m_clipType == AUDIO) m_baseColor = QColor(141, 215, 166);
+ } else if (m_clipType == Audio) m_baseColor = QColor(141, 215, 166);
else m_baseColor = QColor(141, 166, 215);
}
+ if (parentItem())
+ m_paintColor = m_baseColor.lighter(135);
+ else
+ m_paintColor = m_baseColor;
m_audioThumbCachePic.clear();
}
effect.setAttribute("active_keyframe", pos);
m_editedKeyframe = pos;
QDomNodeList params = effect.elementsByTagName("parameter");
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe")) {
QString kfr = e.attribute("keyframes");
QDomNodeList params = effect.elementsByTagName("parameter");
int start = cropStart().frames(m_fps);
int end = (cropStart() + cropDuration()).frames(m_fps) - 1;
- for (int i = 0; i < params.count(); i++) {
+ for (int i = 0; i < params.count(); ++i) {
QDomElement e = params.item(i).toElement();
if (!e.isNull() && (e.attribute("type") == "keyframe" || e.attribute("type") == "simplekeyframe")) {
QString kfr = e.attribute("keyframes");
return m_clip->getProducer(trackSpecific ? track : -1);
}
-QMap<int, QDomElement> ClipItem::adjustEffectsToDuration(int width, int height, ItemInfo oldInfo)
+QMap<int, QDomElement> ClipItem::adjustEffectsToDuration(int width, int height, const ItemInfo &oldInfo)
{
QMap<int, QDomElement> effects;
- for (int i = 0; i < m_effectList.count(); i++) {
+ for (int i = 0; i < m_effectList.count(); ++i) {
QDomElement effect = m_effectList.at(i);
if (effect.attribute("id").startsWith("fade")) {
}
if (effects.contains(i)) {
setFadeIn(out - in);
- }
+ }
} else {
if (out != clipEnd) {
effects[i] = effect.cloneNode().toElement();
if (in < cropStart().frames(m_fps)) {
if (!effects.contains(i))
effects[i] = effect.cloneNode().toElement();
- EffectsList::setParameter(effect, "in", QString::number(cropStart().frames(m_fps)));
+ EffectsList::setParameter(effect, "in", QString::number((int) cropStart().frames(m_fps)));
}
if (effects.contains(i))
setFadeOut(out - in);
EffectsList::setParameter(effect, "frame", QString::number(frame - diff));
continue;
} else if (effect.attribute("id") == "pan_zoom") {
- effect.setAttribute("in", cropStart().frames(m_fps));
- effect.setAttribute("out", (cropStart() + cropDuration()).frames(m_fps) - 1);
- }
+ effect.setAttribute("in", cropStart().frames(m_fps));
+ effect.setAttribute("out", (cropStart() + cropDuration()).frames(m_fps) - 1);
+ }
QDomNodeList params = effect.elementsByTagName("parameter");
for (int j = 0; j < params.count(); j++) {
const QStringList data = parameter.attribute("keyframes").split(';', QString::SkipEmptyParts);
QMap <int, double> keyframes;
foreach (QString keyframe, data) {
- int keyframepos = keyframe.section(':', 0, 0).toInt();
- // if keyframe was at clip start, update it
- if (keyframepos == oldin) {
- keyframepos = in;
- keyFrameUpdated = true;
- }
+ int keyframepos = keyframe.section(':', 0, 0).toInt();
+ // if keyframe was at clip start, update it
+ if (keyframepos == oldin) {
+ keyframepos = in;
+ keyFrameUpdated = true;
+ }
keyframes[keyframepos] = locale.toDouble(keyframe.section(':', 1, 1));
}
relPos = (out - lastPos) / (qreal)(i.key() - lastPos + 1);
keyframes[out] = lastValue + (i.value() - lastValue) * relPos;
}
- }
+ }
lastPos = i.key();
lastValue = i.value();
if (endFound)