#include <QTimer>
#include <QStyleOptionGraphicsItem>
#include <QGraphicsScene>
-
+#include <QGraphicsView>
+#include <QScrollBar>
+#include <QMimeData>
+#include <QApplication>
#include <KDebug>
#include <mlt++/Mlt.h>
#include "clipitem.h"
+#include "customtrackview.h"
#include "renderer.h"
+#include "events.h"
#include "kdenlivesettings.h"
ClipItem::ClipItem(DocClipBase *clip, int track, int startpos, const QRectF & rect, int duration)
-: QGraphicsRectItem(rect), m_clip(clip), m_resizeMode(NONE), m_grabPoint(0), m_maxTrack(0), m_track(track), m_startPos(startpos), m_hasThumbs(false), startThumbTimer(NULL), endThumbTimer(NULL), m_startFade(0), m_endFade(0), m_effectsCounter(0),audioThumbWasDrawn(false)
+: QGraphicsRectItem(rect), m_clip(clip), m_resizeMode(NONE), m_grabPoint(0), m_maxTrack(0), m_track(track), m_startPos(startpos), m_hasThumbs(false), startThumbTimer(NULL), endThumbTimer(NULL), m_startFade(0), m_endFade(0), m_effectsCounter(0),audioThumbWasDrawn(false),audioThumbReady(false), m_opacity(1.0), m_timeLine(0)
{
//setToolTip(name);
kDebug()<<"******* CREATING NEW TML CLIP, DUR: "<<duration;
m_maxDuration = duration;
if (duration != -1) m_cropDuration = duration;
else m_cropDuration = m_maxDuration;
+ setAcceptDrops (true);
/*
m_cropStart = xml.attribute("in", 0).toInt();
else if (m_clipType == IMAGE) {
m_startPix = KThumb::getImage(KUrl(m_xml.attribute("resource")), 50 * KdenliveSettings::project_display_ratio(), 50);
}
+ else if (m_clipType == AUDIO) {
+ connect(clip, SIGNAL (gotAudioData()), this, SLOT (slotGotAudioData()));
+ }
}
}
void ClipItem::slotGotAudioData(){
+ // Slot connected (in the ctor) to DocClipBase's gotAudioData() signal:
+ // the audio frame cache has been filled, so flag the audio thumbnail as
+ // ready and request a repaint so paint() can draw it.
+ audioThumbReady=true;
update();
}
return m_startPos + m_cropDuration;
}
+// Draw the user's attention to this clip by running a short (750 ms)
+// opacity animation. The QTimeLine is created lazily on first use and
+// reused afterwards; animate() receives each interpolated value.
+void ClipItem::flashClip()
+{
+    if (!m_timeLine) {
+        m_timeLine = new QTimeLine(750, this);
+        connect(m_timeLine, SIGNAL(valueChanged(qreal)), this, SLOT(animate(qreal)));
+    }
+    m_timeLine->start();
+}
+
+// Animation step for flashClip(): the QTimeLine drives this slot with a
+// value in [0.0, 1.0]; store it as the item's opacity and schedule a
+// repaint so paint() applies it via painter->setOpacity(m_opacity).
+void ClipItem::animate(qreal value)
+{
+ m_opacity = value;
+ update();
+}
+
// virtual
void ClipItem::paint(QPainter *painter,
const QStyleOptionGraphicsItem *option,
QWidget *widget)
{
+ painter->setOpacity(m_opacity);
+ QBrush paintColor = brush();
+ if (isSelected()) paintColor = QBrush(QColor(150, 50, 100));
QRectF br = rect();
+ QRect rectInView;//this is the rect that is visible by the user
+ if (scene()->views().size()>0){
+ rectInView=scene()->views()[0]->viewport()->rect();
+ rectInView.moveTo(scene()->views()[0]->horizontalScrollBar()->value(),scene()->views()[0]->verticalScrollBar()->value());
+ rectInView.adjust(-10,-10,10,10);//make view rect 10 pixel greater on each site, or repaint after scroll event
+ //kDebug() << scene()->views()[0]->viewport()->rect() << " " << scene()->views()[0]->horizontalScrollBar()->value();
+ }
+ if (rectInView.isNull())
+ return;
+ QPainterPath clippath;
+ clippath.addRect(rectInView);
+ int startpixel=rectInView.x()-rect().x();//start and endpixel that is viewable from rect()
+ if (startpixel<0)
+ startpixel=0;
+ int endpixel=rectInView.width()+rectInView.x();
+ if (endpixel<0)
+ endpixel=0;
+
painter->setRenderHints(QPainter::Antialiasing);
QPainterPath roundRectPathUpper,roundRectPathLower;
double roundingY = 20;
roundRectPathLower.lineTo(br.x() + br .width() - offset, br.y()+ br.height()/2 - offset);
roundRectPathLower.closeSubpath();
- painter->setClipPath(roundRectPathUpper.united(roundRectPathLower), Qt::IntersectClip);
- //painter->fillPath(roundRectPath, brush()); //, QBrush(QColor(Qt::red)));
- painter->fillRect(br, brush());
+ painter->setClipPath(roundRectPathUpper.united(roundRectPathLower).intersected(clippath), Qt::IntersectClip);
+ //painter->fillPath(roundRectPath, brush()); //, QBrush(QColor(Qt::red)));
+ painter->fillRect(br.intersected(rectInView), paintColor);
//painter->fillRect(QRectF(br.x() + br.width() - m_endPix.width(), br.y(), m_endPix.width(), br.height()), QBrush(QColor(Qt::black)));
// draw thumbnails
QLineF l2(br.x() + m_startPix.width(), br.y(), br.x() + m_startPix.width(), br.y() + br.height());
painter->drawLine(l2);
}
- if (m_clipType == AV || m_clipType==AUDIO ){
- int startpixel=0;
- int endpixel=400;
+ if ( ( m_clipType == AV || m_clipType==AUDIO ||true) && audioThumbReady ){
+
QPainterPath path= m_clipType==AV ? roundRectPathLower : roundRectPathUpper.united(roundRectPathLower);
painter->fillPath(path,QBrush(QColor(200,200,200,127)));
int channels=2;
double pixelForOneFrame=(double)br.width()/duration();
- if (framePixelWidth!=pixelForOneFrame){
- emit prepareAudioThumb(pixelForOneFrame,path,startpixel,endpixel);
-
- /* re.x() changeds every time on move, so the path has to be rebuild every time
- QRectF re=path.boundingRect();
-
- if ( !baseClip()->audioFrameChache.isEmpty()){
-
- QMap<int,QPainterPath > positiveChannelPaths;
- QMap<int,QPainterPath > negativeChannelPaths;
-
- QPen audiopen;
- audiopen.setWidth(0);
- painter->setPen(audiopen);
-
- for (int i=0;i<channels;i++){
-
- positiveChannelPaths[i].moveTo(re.x(),re.y()+re.height()*i/channels+ (re.y()+re.height()/channels)/2);
- negativeChannelPaths[i].moveTo(re.x(),re.y()+re.height()*i/channels+ (re.y()+re.height()/channels)/2);
- }
-
- for (int samples=re.x();samples<re.x()+re.width();samples++){
- double frame=(double)(samples-re.x())/pixelForOneFrame;
- int sample=(frame-(int)(frame))*20 ;// AUDIO_FRAME_SIZE
- if (frame<0 || sample< 0 || sample>19 )
- continue;
- QMap<int,QByteArray> frame_channel_data=baseClip()->audioFrameChache[(int)frame];
-
- for (int channel=0;channel<channels && frame_channel_data[channel].size()> 0;channel++){
- int y=re.y()+re.height()*channel/channels+ (re.height()/channels)/2;
- positiveChannelPaths[channel].lineTo(samples,y+( (int)frame_channel_data[channel][sample] -127/2 ) * (re.height()/channels) / 64 );
- negativeChannelPaths[channel].lineTo(samples,y-( (int)frame_channel_data[channel][sample] -127/2 ) * (re.height()/channels) / 64 );
- }
- }
- for (int i=0;i<channels;i++){
- if (pixelForOneFrame<10){
- channelPaths[i]=positiveChannelPaths[i].united(negativeChannelPaths[i]);
- }else{
- channelPaths[i]=positiveChannelPaths[i];
- }
- }
- audioThumbWasDrawn=true;
- framePixelWidth=pixelForOneFrame;
- }
- */
- }
- for (int startCache=startpixel-startpixel%100;startCache<path.boundingRect().width() && startCache < endpixel;startCache+=100){
+
+ emit prepareAudioThumb(pixelForOneFrame,path,startpixel,endpixel+200);//200 more for less missing parts before repaint after scrolling
+
+ for (int startCache=startpixel-startpixel%100; startCache < endpixel+300;startCache+=100){
if (audioThumbCachePic.contains(startCache) && !audioThumbCachePic[startCache].isNull() )
painter->drawPixmap(path.boundingRect().x()+startCache,path.boundingRect().y(),audioThumbCachePic[startCache]);
}
- /*if ( channelPaths.size() )
- for (int i=0;i<channels;i++){
- painter->drawPath(channelPaths[i]);
- }*/
+
}
-
-
-
+
// draw start / end fades
double scale = br.width() / m_cropDuration;
QBrush fades;
painter->setFont(font);
}
+ pen.setColor(Qt::red);
+ //pen.setStyle(Qt::DashDotDotLine); //Qt::DotLine);
+ if (isSelected()) painter->setPen(pen);
+ painter->setClipRect(option->exposedRect);
+ painter->drawPath(roundRectPathUpper.united(roundRectPathLower).intersected(clippath));
+
QRectF txtBounding = painter->boundingRect(br, Qt::AlignCenter, " " + m_clipName + " ");
painter->fillRect(txtBounding, QBrush(QColor(255,255,255,150)));
painter->drawText(txtBounding, Qt::AlignCenter, m_clipName);
- pen.setColor(Qt::red);
- pen.setStyle(Qt::DashDotDotLine); //Qt::DotLine);
- if (isSelected()) painter->setPen(pen);
- painter->setClipRect(option->exposedRect);
- painter->drawPath(roundRectPathUpper.united(roundRectPathLower));
+
+ //painter->fillRect(startpixel,0,startpixel+endpixel,(int)br.height(), QBrush(QColor(255,255,255,150)));
//painter->fillRect(QRect(br.x(), br.y(), roundingX, roundingY), QBrush(QColor(Qt::green)));
/*QRectF recta(rect().x(), rect().y(), scale,rect().height());
QRectF re=path.boundingRect();
- if ( (!audioThumbWasDrawn || framePixelWidth!=pixelForOneFrame ) && !baseClip()->audioFrameChache.isEmpty()){
- for (int startCache=0;startCache<re.width() && startCache<endpixel ;startCache+=100){
-
+ //if ( (!audioThumbWasDrawn || framePixelWidth!=pixelForOneFrame ) && !baseClip()->audioFrameChache.isEmpty()){
+
+ for (int startCache=startpixel-startpixel%100;startCache+100<endpixel ;startCache+=100){
+ //kDebug() << "creating " << startCache;
+ //if (framePixelWidth!=pixelForOneFrame ||
+ if (framePixelWidth==pixelForOneFrame && audioThumbCachePic.contains(startCache))
+ continue;
if (audioThumbCachePic[startCache].isNull() || framePixelWidth!=pixelForOneFrame){
audioThumbCachePic[startCache]=QPixmap(100,re.height());
audioThumbCachePic[startCache].fill(QColor(200,200,200,127));
}
-
+ bool fullAreaDraw=pixelForOneFrame<10;
QMap<int,QPainterPath > positiveChannelPaths;
QMap<int,QPainterPath > negativeChannelPaths;
QPainter pixpainter(&audioThumbCachePic[startCache]);
audiopen.setWidth(0);
pixpainter.setPen(audiopen);
pixpainter.setRenderHint(QPainter::Antialiasing,true);
+ //pixpainter.drawLine(0,0,100,re.height());
+ int channelHeight=audioThumbCachePic[startCache].height()/channels;
+
for (int i=0;i<channels;i++){
- positiveChannelPaths[i].moveTo(0,0+audioThumbCachePic[startCache].height()*i/channels+ (audioThumbCachePic[startCache].height()/channels)/2);
- negativeChannelPaths[i].moveTo(0,0+audioThumbCachePic[startCache].height()*i/channels+ (audioThumbCachePic[startCache].height()/channels)/2);
+ positiveChannelPaths[i].moveTo(0,channelHeight*i+ channelHeight/2);
+ negativeChannelPaths[i].moveTo(0,channelHeight*i+ channelHeight/2);
}
- for (int samples=startCache;samples<startCache+100;samples++){
- double frame=(double)(samples-0)/pixelForOneFrame;
+ for (int samples=0;samples<=100;samples++){
+ double frame=(double)(samples+startCache-0)/pixelForOneFrame;
int sample=(frame-(int)(frame))*20 ;// AUDIO_FRAME_SIZE
if (frame<0 || sample< 0 || sample>19 )
continue;
for (int channel=0;channel<channels && frame_channel_data[channel].size()> 0;channel++){
- int y=audioThumbCachePic[startCache].height()*channel/channels+ (/*re.height()*/audioThumbCachePic[startCache].height()/channels)/2;
-
- positiveChannelPaths[channel].lineTo(samples,0.1+y+( (int)frame_channel_data[channel][sample] -127/2 ) * (audioThumbCachePic[startCache].height()/channels) / 64 );
- negativeChannelPaths[channel].lineTo(samples,0.1+y-( (int)frame_channel_data[channel][sample] -127/2 ) * (audioThumbCachePic[startCache].height()/channels) / 64 );
+ int y=channelHeight*channel+ channelHeight/2;
+ int delta=(int)(frame_channel_data[channel][sample] -127/2 ) * channelHeight/ 64;
+ if (fullAreaDraw){
+ positiveChannelPaths[channel].lineTo(samples,0.1+y+qAbs( delta ));
+ negativeChannelPaths[channel].lineTo(samples,0.1+y-qAbs( delta ));
+ }else{
+ positiveChannelPaths[channel].lineTo(samples,0.1+y+delta);
+ negativeChannelPaths[channel].lineTo(samples,0.1+y-delta);
+ }
}
}
for (int i=0;i<channels;i++){
- if (pixelForOneFrame<10){
+ if (fullAreaDraw){
pixpainter.fillPath(positiveChannelPaths[i].united(negativeChannelPaths[i]),QBrush(Qt::SolidPattern));//or singleif looks better
pixpainter.setBrush(QBrush(QColor(200,200,100,200)));
pixpainter.drawPath(positiveChannelPaths[i].united(negativeChannelPaths[i]));//or singleif looks better
pixpainter.drawPath(positiveChannelPaths[i]);
}
}
- audioThumbWasDrawn=true;
+ //audioThumbWasDrawn=true;
framePixelWidth=pixelForOneFrame;
- }
+ //}
}
int ClipItem::fadeIn() const
m_effectList.append(effect);
effectParams["tag"] = effect.attribute("tag");
effectParams["kdenlive_ix"] = effect.attribute("kdenlive_ix");
+ QString state = effect.attribute("disabled");
+ if (!state.isEmpty()) effectParams["disabled"] = state;
QDomNodeList params = effect.elementsByTagName("parameter");
for (int i = 0; i < params.count(); i++) {
QDomElement e = params.item(i).toElement();
if (!e.isNull())
effectParams[e.attribute("name")] = e.attribute("value");
}
+ flashClip();
update(boundingRect());
return effectParams;
}
QMap <QString, QString> effectParams;
effectParams["tag"] = effect.attribute("tag");
effectParams["kdenlive_ix"] = effect.attribute("kdenlive_ix");
+ QString state = effect.attribute("disabled");
+ if (!state.isEmpty()) effectParams["disabled"] = state;
QDomNodeList params = effect.elementsByTagName("parameter");
for (int i = 0; i < params.count(); i++) {
QDomElement e = params.item(i).toElement();
+ if (e.attribute("name").contains(";")){
+ QString format=e.attribute("format");
+ QStringList separators=format.split("%d",QString::SkipEmptyParts);
+ QStringList values=e.attribute("value").split(QRegExp("[,:;x]"));
+ QString neu;
+ QTextStream txtNeu(&neu);
+ if (values.size()>0)
+ txtNeu << (int)values[0].toDouble();
+ for (int i=0;i<separators.size() && i+1<values.size();i++){
+ txtNeu << separators[i];
+ txtNeu << (int)(values[i+1].toDouble());
+ }
+ effectParams["start"]=neu;
+ }else
if (!e.isNull())
effectParams[e.attribute("name")] = e.attribute("value");
}
break;
}
}
+ flashClip();
update(boundingRect());
}
+//virtual
+// An effect was dropped onto the clip: decode the XML effect description
+// from the drag's "kdenlive/effectslist" mime data (accepted in
+// dragEnterEvent) and forward it to the timeline view for insertion.
+void ClipItem::dropEvent ( QGraphicsSceneDragDropEvent * event )
+{
+    QString effects = QString(event->mimeData()->data("kdenlive/effectslist"));
+    QDomDocument doc;
+    doc.setContent(effects, true);
+    QDomElement e = doc.documentElement();
+    // Guard against a scene with no attached views (paint() performs the
+    // same check) instead of dereferencing views()[0] unconditionally,
+    // and use qobject_cast so the NULL test below is actually meaningful
+    // (a C-style cast can never yield NULL here).
+    if (scene()->views().isEmpty()) return;
+    CustomTrackView *view = qobject_cast<CustomTrackView *>(scene()->views()[0]);
+    // FIXME: frame rate is hard-coded to 25 fps; should come from the
+    // project profile.
+    if (view) view->slotAddEffect(e, GenTime(m_startPos, 25), m_track);
+}
+
+//virtual
+// Accept the drag only when it carries an effect list payload; any other
+// mime type is refused, so it cannot be dropped onto the clip.
+void ClipItem::dragEnterEvent(QGraphicsSceneDragDropEvent *event)
+{
+    const bool isEffectDrag = event->mimeData()->hasFormat("kdenlive/effectslist");
+    event->setAccepted(isEffectDrag);
+}
+
+// Intentionally empty: there is no visual drop indicator to undo when a
+// drag leaves the clip. Overridden alongside dragEnterEvent/dropEvent —
+// presumably to swallow the event rather than propagate it; confirm.
+void ClipItem::dragLeaveEvent(QGraphicsSceneDragDropEvent *event)
+{
+ Q_UNUSED(event);
+}
// virtual
/*