m_timeLine(0),
m_startThumbRequested(false),
m_endThumbRequested(false),
- m_hover(false),
+ //m_hover(false),
m_speed(speed),
m_framePixelWidth(0)
{
- setZValue(1);
+ setZValue(2);
setRect(0, 0, (info.endPos - info.startPos).frames(fps) - 0.02, (double)(KdenliveSettings::trackheight() - 2));
setPos(info.startPos.frames(fps), (double)(info.track * KdenliveSettings::trackheight()) + 1);
m_audioThumbReady = clip->audioThumbCreated();
setFlags(QGraphicsItem::ItemClipsToShape | QGraphicsItem::ItemIsMovable | QGraphicsItem::ItemIsSelectable);
- setAcceptsHoverEvents(true);
+ //setAcceptsHoverEvents(true);
connect(this , SIGNAL(prepareAudioThumb(double, int, int, int)) , this, SLOT(slotPrepareAudioThumb(double, int, int, int)));
if (m_clipType == VIDEO || m_clipType == AV || m_clipType == SLIDESHOW || m_clipType == PLAYLIST) {
} else if (m_clipType == IMAGE || m_clipType == TEXT) {
setBrush(QColor(141, 166, 215));
m_startPix = KThumb::getImage(KUrl(clip->getProperty("resource")), (int)(KdenliveSettings::trackheight() * KdenliveSettings::project_display_ratio()), KdenliveSettings::trackheight());
- m_endPix = m_startPix;
} else if (m_clipType == AUDIO) {
setBrush(QColor(141, 215, 166));
connect(clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
ClipItem::~ClipItem()
{
+ blockSignals(true);
+ if (m_clipType == VIDEO || m_clipType == AV || m_clipType == SLIDESHOW || m_clipType == PLAYLIST) {
+ disconnect(m_clip->thumbProducer(), SIGNAL(thumbReady(int, QPixmap)), this, SLOT(slotThumbReady(int, QPixmap)));
+ disconnect(m_clip, SIGNAL(gotAudioData()), this, SLOT(slotGotAudioData()));
+ }
delete m_startThumbTimer;
delete m_endThumbTimer;
delete m_timeLine;
QDomNodeList params = effect.elementsByTagName("parameter");
for (int i = 0; i < params.count(); i++) {
QDomElement e = params.item(i).toElement();
- kDebug() << "// inint eff: " << e.attribute("name");
+ kDebug() << "// init eff: " << e.attribute("name");
if (!e.isNull() && e.attribute("type") == "keyframe") {
QString def = e.attribute("default");
// Effect has a keyframe type parameter, we need to set the values
}
}
}
-
if (effect.attribute("tag") == "volume" || effect.attribute("tag") == "brightness") {
if (effect.attribute("id") == "fadeout" || effect.attribute("id") == "fade_to_black") {
int end = (duration() + cropStart()).frames(m_fps);
int start = end;
if (effect.attribute("id") == "fadeout") {
if (m_effectList.hasEffect(QString(), "fade_to_black") == -1) {
- start -= EffectsList::parameter(effect, "in").toInt();
+ int effectDuration = EffectsList::parameter(effect, "in").toInt();
+ if (effectDuration > cropDuration().frames(m_fps)) {
+ effectDuration = cropDuration().frames(m_fps) / 2;
+ }
+ start -= effectDuration;
} else {
QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fade_to_black");
start -= EffectsList::parameter(fadeout, "out").toInt() - EffectsList::parameter(fadeout, "in").toInt();
}
} else if (effect.attribute("id") == "fade_to_black") {
if (m_effectList.hasEffect(QString(), "fadeout") == -1) {
- start -= EffectsList::parameter(effect, "in").toInt();
+ int effectDuration = EffectsList::parameter(effect, "in").toInt();
+ if (effectDuration > cropDuration().frames(m_fps)) {
+ effectDuration = cropDuration().frames(m_fps) / 2;
+ }
+ start -= effectDuration;
} else {
QDomElement fadeout = m_effectList.getEffectByTag(QString(), "fadeout");
start -= EffectsList::parameter(fadeout, "out").toInt() - EffectsList::parameter(fadeout, "in").toInt();
int start = cropStart().frames(m_fps);
int end = start;
if (effect.attribute("id") == "fadein") {
- if (m_effectList.hasEffect(QString(), "fade_from_black") == -1)
- end += EffectsList::parameter(effect, "out").toInt();
- else
+ if (m_effectList.hasEffect(QString(), "fade_from_black") == -1) {
+ int effectDuration = EffectsList::parameter(effect, "out").toInt();
+ if (effectDuration > cropDuration().frames(m_fps)) {
+ effectDuration = cropDuration().frames(m_fps) / 2;
+ }
+ end += effectDuration;
+ } else
end += EffectsList::parameter(m_effectList.getEffectByTag(QString(), "fade_from_black"), "out").toInt();
} else if (effect.attribute("id") == "fade_from_black") {
- if (m_effectList.hasEffect(QString(), "fadein") == -1)
- end += EffectsList::parameter(effect, "out").toInt();
- else
+ if (m_effectList.hasEffect(QString(), "fadein") == -1) {
+ int effectDuration = EffectsList::parameter(effect, "out").toInt();
+ if (effectDuration > cropDuration().frames(m_fps)) {
+ effectDuration = cropDuration().frames(m_fps) / 2;
+ }
+ end += effectDuration;
+ } else
end += EffectsList::parameter(m_effectList.getEffectByTag(QString(), "fadein"), "out").toInt();
}
EffectsList::setParameter(effect, "in", QString::number(start));
{
m_selectedEffect = ix;
QDomElement effect = effectAt(m_selectedEffect);
- QDomNodeList params = effect.elementsByTagName("parameter");
- if (effect.attribute("disabled") != "1")
- for (int i = 0; i < params.count(); i++) {
- QDomElement e = params.item(i).toElement();
- if (!e.isNull() && e.attribute("type") == "keyframe") {
- m_keyframes.clear();
- double max = e.attribute("max").toDouble();
- double min = e.attribute("min").toDouble();
- m_keyframeFactor = 100.0 / (max - min);
- m_keyframeDefault = e.attribute("default").toDouble();
- // parse keyframes
- const QStringList keyframes = e.attribute("keyframes").split(';', QString::SkipEmptyParts);
- foreach(const QString &str, keyframes) {
- int pos = str.section(':', 0, 0).toInt();
- double val = str.section(':', 1, 1).toDouble();
- m_keyframes[pos] = val;
+ if (effect.isNull() == false) {
+ QDomNodeList params = effect.elementsByTagName("parameter");
+ if (effect.attribute("disabled") != "1")
+ for (int i = 0; i < params.count(); i++) {
+ QDomElement e = params.item(i).toElement();
+ if (!e.isNull() && e.attribute("type") == "keyframe") {
+ m_keyframes.clear();
+ double max = e.attribute("max").toDouble();
+ double min = e.attribute("min").toDouble();
+ m_keyframeFactor = 100.0 / (max - min);
+ m_keyframeDefault = e.attribute("default").toDouble();
+ // parse keyframes
+ const QStringList keyframes = e.attribute("keyframes").split(';', QString::SkipEmptyParts);
+ foreach(const QString &str, keyframes) {
+ int pos = str.section(':', 0, 0).toInt();
+ double val = str.section(':', 1, 1).toDouble();
+ m_keyframes[pos] = val;
+ }
+ update();
+ return;
}
- update();
- return;
}
- }
+ }
if (!m_keyframes.isEmpty()) {
m_keyframes.clear();
update();
void ClipItem::slotThumbReady(int frame, QPixmap pix)
{
if (scene() == NULL) return;
- QRectF r = sceneBoundingRect();
- double width = m_startPix.width() / projectScene()->scale();
+ QRectF r = boundingRect();
+ double width = pix.width() / projectScene()->scale().x();
if (m_startThumbRequested && frame == cropStart().frames(m_fps)) {
m_startPix = pix;
m_startThumbRequested = false;
- double height = r.height();
- update(r.x(), r.y(), width, height);
+ update(r.left(), r.top(), width, pix.height());
} else if (m_endThumbRequested && frame == (cropStart() + cropDuration()).frames(m_fps) - 1) {
m_endPix = pix;
m_endThumbRequested = false;
- double height = r.height();
- update(r.right() - width, r.y(), width, height);
+ update(r.right() - width, r.y(), width, pix.height());
}
}
const double itemWidth = br.width();
const double itemHeight = br.height();
const double scale = option->matrix.m11();
+ const double vscale = option->matrix.m22();
const qreal xoffset = pen().widthF() / scale;
//painter->setRenderHints(QPainter::Antialiasing);
//painter->setClipPath(resultClipPath, Qt::IntersectClip);
// draw thumbnails
- painter->setMatrixEnabled(false);
if (KdenliveSettings::videothumbnails() && !isAudioOnly()) {
QPen pen = painter->pen();
pen.setColor(QColor(255, 255, 255, 150));
painter->setPen(pen);
- if (m_clipType == IMAGE && !m_startPix.isNull()) {
- QPointF p1 = painter->matrix().map(QPointF(itemWidth, 0)) - QPointF(m_startPix.width(), 0);
- QPointF p2 = painter->matrix().map(QPointF(itemWidth, itemHeight)) - QPointF(m_startPix.width(), 0);
- painter->drawPixmap(p1, m_startPix);
- QLineF l(p1, p2);
- painter->drawLine(l);
+ if ((m_clipType == IMAGE || m_clipType == TEXT) && !m_startPix.isNull()) {
+ double left = itemWidth - m_startPix.width() * vscale / scale;
+ QRectF pixrect(left, 0.0, m_startPix.width() * vscale / scale, m_startPix.height());
+ QRectF source(0.0, 0.0, (double) m_startPix.width(), (double) m_startPix.height());
+ painter->drawPixmap(pixrect, m_startPix, source);
+ QLineF l2(left, 0, left, m_startPix.height());
+ painter->drawLine(l2);
} else if (!m_endPix.isNull()) {
- QPointF p1 = painter->matrix().map(QPointF(itemWidth, 0)) - QPointF(m_endPix.width(), 0);
- QPointF p2 = painter->matrix().map(QPointF(itemWidth, itemHeight)) - QPointF(m_endPix.width(), 0);
- painter->drawPixmap(p1, m_endPix);
- QLineF l(p1, p2);
- painter->drawLine(l);
+ double left = itemWidth - m_endPix.width() * vscale / scale;
+ QRectF pixrect(left, 0.0, m_endPix.width() * vscale / scale, m_endPix.height());
+ QRectF source(0.0, 0.0, (double) m_endPix.width(), (double) m_endPix.height());
+ painter->drawPixmap(pixrect, m_endPix, source);
+ QLineF l2(left, 0, left, m_startPix.height());
+ painter->drawLine(l2);
}
if (!m_startPix.isNull()) {
- QPointF p1 = painter->matrix().map(QPointF(0, 0)) + QPointF(1.0, 0);
- QPointF p2 = painter->matrix().map(QPointF(0, itemHeight)) + QPointF(1.0, 0);
- painter->drawPixmap(p1, m_startPix);
- QLineF l2(p1.x() + m_startPix.width(), p1.y(), p2.x() + m_startPix.width(), p2.y());
+ double right = m_startPix.width() * vscale / scale;
+ QRectF pixrect(0.0, 0.0, right, m_startPix.height());
+ QRectF source(0.0, 0.0, (double) m_startPix.width(), (double) m_startPix.height());
+ painter->drawPixmap(pixrect, m_startPix, source);
+ QLineF l2(right, 0, right, m_startPix.height());
painter->drawLine(l2);
}
painter->setPen(Qt::black);
}
+ painter->setMatrixEnabled(false);
// draw audio thumbnails
if (KdenliveSettings::audiothumbnails() && m_speed == 1.0 && !isVideoOnly() && ((m_clipType == AV && (exposed.bottom() > (itemHeight / 2) || isAudioOnly())) || m_clipType == AUDIO) && m_audioThumbReady) {
if (m_timeLine && m_timeLine->state() == QTimeLine::Running) {
qreal value = m_timeLine->currentValue();
txtBounding.setWidth(txtBounding.width() * value);
- markerBrush.setColor(QColor(50 + 200 * (1.0 - value), 50, 50, 100 + 50 * value));
+ markerBrush.setColor(QColor(50 + 200 *(1.0 - value), 50, 50, 100 + 50 * value));
} else markerBrush.setColor(QColor(50, 50, 50, 150));
painter->setBrush(markerBrush);
painter->setPen(Qt::NoPen);
// draw transition handles on hover
- if (m_hover && itemWidth * scale > 40) {
+ /*if (m_hover && itemWidth * scale > 40) {
QPointF p1 = painter->matrix().map(QPointF(0, itemHeight / 2)) + QPointF(10, 0);
painter->drawPixmap(p1, projectScene()->m_transitionPixmap);
p1 = painter->matrix().map(QPointF(itemWidth, itemHeight / 2)) - QPointF(22, 0);
painter->drawPixmap(p1, projectScene()->m_transitionPixmap);
- }
+ }*/
// draw effect or transition keyframes
if (itemWidth > 20) drawKeyFrames(painter, exposed);
{
if (isItemLocked()) return NONE;
- if (isSelected()) {
+ if (isSelected() || (parentItem() && parentItem()->isSelected())) {
m_editedKeyframe = mouseOverKeyFrames(pos);
if (m_editedKeyframe != -1) return KEYFRAME;
}
QRectF rect = sceneBoundingRect();
- const double scale = projectScene()->scale();
+ const double scale = projectScene()->scale().x();
double maximumOffset = 6 / scale;
+ int addtransitionOffset = 10;
+ // Don't allow add transition if track height is very small
+ if (rect.height() < 30) addtransitionOffset = 0;
if (qAbs((int)(pos.x() - (rect.x() + m_startFade))) < maximumOffset && qAbs((int)(pos.y() - rect.y())) < 6) {
if (m_startFade == 0) setToolTip(i18n("Add audio fade"));
else setToolTip(i18n("Audio fade duration: %1s", GenTime(m_startFade, m_fps).seconds()));
return FADEIN;
- } else if (pos.x() - rect.x() < maximumOffset) {
+ } else if (pos.x() - rect.x() < maximumOffset && (rect.bottom() - pos.y() > addtransitionOffset)) {
setToolTip(i18n("Crop from start: %1s", cropStart().seconds()));
return RESIZESTART;
} else if (qAbs((int)(pos.x() - (rect.x() + rect.width() - m_endFade))) < maximumOffset && qAbs((int)(pos.y() - rect.y())) < 6) {
if (m_endFade == 0) setToolTip(i18n("Add audio fade"));
else setToolTip(i18n("Audio fade duration: %1s", GenTime(m_endFade, m_fps).seconds()));
return FADEOUT;
- } else if (qAbs((int)(pos.x() - (rect.x() + rect.width()))) < maximumOffset) {
+ } else if ((rect.right() - pos.x() < maximumOffset) && (rect.bottom() - pos.y() > addtransitionOffset)) {
setToolTip(i18n("Clip duration: %1s", duration().seconds()));
return RESIZEEND;
- } else if (qAbs((int)(pos.x() - (rect.x() + 16 / scale))) < maximumOffset && qAbs((int)(pos.y() - (rect.y() + rect.height() / 2 + 9))) < 6) {
+ } else if ((pos.x() - rect.x() < 16 / scale) && (rect.bottom() - pos.y() <= addtransitionOffset)) {
setToolTip(i18n("Add transition"));
return TRANSITIONSTART;
- } else if (qAbs((int)(pos.x() - (rect.x() + rect.width() - 21 / scale))) < maximumOffset && qAbs((int)(pos.y() - (rect.y() + rect.height() / 2 + 9))) < 6) {
+ } else if ((rect.right() - pos.x() < 16 / scale) && (rect.bottom() - pos.y() <= addtransitionOffset)) {
setToolTip(i18n("Add transition"));
return TRANSITIONEND;
}
//kDebug() << "// PREP AUDIO THUMB FROM : scale:" << pixelForOneFrame<< ", from: " << startpixel << ", to: " << endpixel;
//if ( (!audioThumbWasDrawn || framePixelWidth!=pixelForOneFrame ) && !baseClip()->audioFrameChache.isEmpty()){
- for (int startCache = startpixel - startpixel % 100;startCache < endpixel;startCache += 100) {
+ for (int startCache = startpixel - startpixel % 100; startCache < endpixel; startCache += 100) {
//kDebug() << "creating " << startCache;
//if (framePixelWidth!=pixelForOneFrame ||
if (m_framePixelWidth == pixelForOneFrame && m_audioThumbCachePic.contains(startCache))
int channelHeight = m_audioThumbCachePic[startCache].height() / channels;
- for (int i = 0;i < channels;i++) {
+ for (int i = 0; i < channels; i++) {
positiveChannelPaths[i].moveTo(0, channelHeight*i + channelHeight / 2);
negativeChannelPaths[i].moveTo(0, channelHeight*i + channelHeight / 2);
}
- for (int samples = 0;samples <= 100;samples++) {
+ for (int samples = 0; samples <= 100; samples++) {
double frame = (double)(samples + startCache - 0) / pixelForOneFrame;
int sample = (int)((frame - (int)(frame)) * 20); // AUDIO_FRAME_SIZE
if (frame < 0 || sample < 0 || sample > 19)
continue;
QMap<int, QByteArray> frame_channel_data = baseClip()->m_audioFrameCache[(int)frame];
- for (int channel = 0;channel < channels && frame_channel_data[channel].size() > 0;channel++) {
+ for (int channel = 0; channel < channels && frame_channel_data[channel].size() > 0; channel++) {
int y = channelHeight * channel + channelHeight / 2;
int delta = (int)(frame_channel_data[channel][sample] - 127 / 2) * channelHeight / 64;
negativeChannelPaths[channel].lineTo(samples, 0.1 + y - delta);
}
}
- for (int channel = 0;channel < channels ;channel++)
+ for (int channel = 0; channel < channels ; channel++)
if (fullAreaDraw && samples == 100) {
positiveChannelPaths[channel].lineTo(samples, channelHeight*channel + channelHeight / 2);
negativeChannelPaths[channel].lineTo(samples, channelHeight*channel + channelHeight / 2);
pixpainter.setPen(QPen(QColor(0, 0, 0)));
pixpainter.setBrush(QBrush(QColor(60, 60, 60)));
- for (int i = 0;i < channels;i++) {
+ for (int i = 0; i < channels; i++) {
if (fullAreaDraw) {
//pixpainter.fillPath(positiveChannelPaths[i].united(negativeChannelPaths[i]),QBrush(Qt::SolidPattern));//or single if looks better
pixpainter.drawPath(positiveChannelPaths[i].united(negativeChannelPaths[i]));//or single if looks better
QGraphicsRectItem::mouseReleaseEvent(event);
}
+/*
//virtual
-void ClipItem::hoverEnterEvent(QGraphicsSceneHoverEvent */*e*/)
+void ClipItem::hoverEnterEvent(QGraphicsSceneHoverEvent *e)
{
//if (e->pos().x() < 20) m_hover = true;
+ return;
if (isItemLocked()) return;
m_hover = true;
QRectF r = boundingRect();
- double width = 35 / projectScene()->scale();
+ double width = 35 / projectScene()->scale().x();
double height = r.height() / 2;
//WARNING: seems like it generates a full repaint of the clip, maybe not so good...
update(r.x(), r.y() + height, width, height);
if (isItemLocked()) return;
m_hover = false;
QRectF r = boundingRect();
- double width = 35 / projectScene()->scale();
+ double width = 35 / projectScene()->scale().x();
double height = r.height() / 2;
//WARNING: seems like it generates a full repaint of the clip, maybe not so good...
update(r.x(), r.y() + height, width, height);
update(r.right() - width, r.y() + height, width, height);
}
+*/
void ClipItem::resizeStart(int posx, double /*speed*/)
{
update(r);
}
if (m_selectedEffect == -1) {
- m_selectedEffect = 0;
- setSelectedEffect(m_selectedEffect);
+ setSelectedEffect(0);
}
return parameters;
}
QTextStream txtNeu(&neu);
if (values.size() > 0)
txtNeu << (int)values[0].toDouble();
- for (int i = 0;i < separators.size() && i + 1 < values.size();i++) {
+ for (int i = 0; i < separators.size() && i + 1 < values.size(); i++) {
txtNeu << separators[i];
txtNeu << (int)(values[i+1].toDouble());
}
}
}
m_effectNames = m_effectList.effectNames().join(" / ");
- if (needRepaint) update(boundingRect());
+ if (m_effectList.isEmpty() || m_selectedEffect - 1 == index.toInt()) {
+ // Current effect was removed
+ if (index.toInt() > m_effectList.count() - 1) {
+ setSelectedEffect(m_effectList.count() - 1);
+ } else setSelectedEffect(index.toInt());
+ }
+ if (needRepaint) update();
flashClip();
}