slotRenderZoneUpdated();
}
-void AbstractAudioScopeWidget::slotReceiveAudio(const QVector<int16_t> sampleData, const int freq, const int num_channels, const int num_samples)
+void AbstractAudioScopeWidget::slotReceiveAudio(const QVector<int16_t>& sampleData, int freq, int num_channels, int num_samples)
{
qDebug() << "Received audio. Size is " << (int) sampleData.size() << ".";
if (sampleData.size() > 0) {
//TODO
}
+/** Temporary debug slot wired to Render::showAudioSignal(); only logs that
+    audio data arrived. The payload is ignored — TODO remove once the
+    QVector-based slotReceiveAudio() path is fully working. */
+void AbstractAudioScopeWidget::slotReceiveAudioTemp(const QByteArray arr)
+{
+    qDebug() << "Audio signal received";
+}
+
void AbstractAudioScopeWidget::slotResetRealtimeFactor(bool realtimeChecked)
{
if (!realtimeChecked) {
on whether it is currently visible and whether a calculation thread is already running. */
void slotRenderZoneUpdated();
void slotRenderZoneUpdated(QImage);//OLD
-    void slotReceiveAudio(const QVector<int16_t> sampleData, const int freq, const int num_channels, const int num_samples); // NEW, TODO comment
+    void slotReceiveAudio(const QVector<int16_t>& sampleData, int freq, int num_channels, int num_samples); // NEW, TODO comment
+    /** Temporary debug slot for the QByteArray-based audio signal; logs
+        receipt only — see slotReceiveAudioTemp() in the .cpp. TODO remove. */
+    void slotReceiveAudioTemp(const QByteArray arr);
/** The following slots are called when rendering of a component has finished. They e.g. update
the widget and decide whether to immediately restart the calculation thread. */
void slotHUDRenderingFinished(uint mseconds, uint accelerationFactor);
AbstractAudioScopeWidget(projMonitor, clipMonitor, true, parent)
{
init();
+ m_cfg = kiss_fftr_alloc(512, 0,0,0);
+}
+AudioSpectrum::~AudioSpectrum()
+{
+    // Release the FFT configuration allocated with kiss_fftr_alloc() in the
+    // constructor. Use the library's own kiss_fftr_free() rather than a raw
+    // free(): it stays correct even if KISS_FFT_MALLOC/KISS_FFT_FREE are
+    // overridden at build time (it expands to free() by default).
+    kiss_fftr_free(m_cfg);
}
QString AudioSpectrum::widgetName() const { return QString("audiospectrum"); }
bool AudioSpectrum::isBackgroundDependingOnInput() const { return false; }
-bool AudioSpectrum::isScopeDependingOnInput() const { return false; }
+// The scope image is computed from the incoming audio frame (see
+// renderScope(), which runs an FFT over audioFrame), so it must be
+// re-rendered whenever new input arrives.
+bool AudioSpectrum::isScopeDependingOnInput() const { return true; }
bool AudioSpectrum::isHUDDependingOnInput() const { return false; }
QImage AudioSpectrum::renderBackground(uint) { return QImage(); }
QImage AudioSpectrum::renderScope(uint accelerationFactor, const QVector<int16_t> audioFrame, const int freq, const int num_channels, const int num_samples)
{
+    // Feed the first 512 samples into the real-input FFT configured in the
+    // constructor and log the first few bins (debug scaffolding; no image yet).
+    // Guard: a frame may carry fewer than 512 samples (num_samples varies per
+    // delivery), and indexing past audioFrame's end is undefined behaviour.
+    if (audioFrame.size() >= 512) {
+        float data[512];
+        kiss_fft_cpx freqData[512]; // kiss_fftr writes nfft/2+1 = 257 bins; 512 is ample
+        for (int i = 0; i < 512; i++) {
+            data[i] = (float) audioFrame.at(i);
+        }
+        kiss_fftr(m_cfg, data, freqData);
+        qDebug() << freqData[0].r << " " << freqData[1].r << " " << freqData[2].r;
+    }
    return QImage();
}
QImage AudioSpectrum::renderHUD(uint) { return QImage(); }
#include "abstractaudioscopewidget.h"
#include "ui_audiospectrum_ui.h"
+#include "tools/kiss_fftr.h"
class AudioSpectrum_UI;
public:
AudioSpectrum(Monitor *projMonitor, Monitor *clipMonitor, QWidget *parent = 0);
+ ~AudioSpectrum();
// Implemented virtual methods
QString widgetName() const;
private:
Ui::AudioSpectrum_UI *ui;
+    // Real-input FFT configuration (512-point, forward transform); allocated
+    // with kiss_fftr_alloc() in the constructor and released in the destructor.
+    kiss_fftr_cfg m_cfg;
};
#endif // AUDIOSPECTRUM_H
if (m_clipMonitor) {
connect(m_clipMonitor->render, SIGNAL(showAudioSignal(const QByteArray&)), m_audiosignal, SLOT(showAudio(const QByteArray&)));
}
- //connect(m_histogramDock, SIGNAL(visibilityChanged(bool)), this, SLOT(slotUpdateScopeFrameRequest()));
- //connect(m_histogram, SIGNAL(requestAutoRefresh(bool)), this, SLOT(slotUpdateScopeFrameRequest()));
m_audioSpectrum = new AudioSpectrum(m_projectMonitor, m_clipMonitor);
m_audioSpectrumDock = new QDockWidget(i18n("AudioSpectrum"), this);
m_audioSpectrumDock->setObjectName(m_audioSpectrum->widgetName());
m_audioSpectrumDock->setWidget(m_audioSpectrum);
addDockWidget(Qt::TopDockWidgetArea, m_audioSpectrumDock);
+ bool b = true;
if (m_projectMonitor) {
-        connect(m_projectMonitor->render, SIGNAL(audioSamplesSignal(QVector<int16_t>,int,int,int)),
-                m_audioSpectrum, SLOT(slotReceiveAudio(QVector<int16_t>,int,int,int)));
+        qDebug() << "project monitor connected";
+        // Keep the SIGNAL/SLOT signature style identical to the clip-monitor
+        // branch below. Qt normalizes 'const T&' to 'T' for value types at
+        // connect() time anyway, so 'const int&' here only differed textually
+        // and invited drift between the two branches.
+        b &= connect(m_projectMonitor->render, SIGNAL(audioSamplesSignal(const QVector<int16_t>&,int,int,int)),
+                m_audioSpectrum, SLOT(slotReceiveAudio(const QVector<int16_t>&,int,int,int)));
+        b &= connect(m_projectMonitor->render, SIGNAL(showAudioSignal(const QByteArray&)),
+                m_audioSpectrum, SLOT(slotReceiveAudioTemp(const QByteArray&)));
}
if (m_clipMonitor) {
- connect(m_clipMonitor->render, SIGNAL(audioSamplesSignal(QVector<int16_t>,int,int,int)),
- m_audioSpectrum, SLOT(slotReceiveAudio(QVector<int16_t>,int,int,int)));
+ qDebug() << "clip monitor connected";
+ b &= connect(m_clipMonitor->render, SIGNAL(audioSamplesSignal(const QVector<int16_t>&,int,int,int)),
+ m_audioSpectrum, SLOT(slotReceiveAudio(const QVector<int16_t>&,int,int,int)));
}
+ Q_ASSERT(b);
m_undoViewDock = new QDockWidget(i18n("Undo History"), this);
m_undoViewDock->setObjectName("undo_history");
QVector<int16_t> sampleVector(samples);
memcpy(sampleVector.data(), data, samples*sizeof(int16_t));
qDebug() << samples << " samples. Freq=" << freq << ", channels=" << num_channels;
- qDebug() << sizeof(char) << " (c) " << sizeof(int16_t) << " (int16_t)";
- qDebug() << sampleVector.at(0);
if (!data)
return;
qDebug() << channels.size() << ": size.";
if (samples > 0) {
emit showAudioSignal(channels);
+ qDebug() << "Emitting audioSamplesSignal with " << samples << " samples.";
emit audioSamplesSignal(sampleVector, freq, num_channels, samples);
} else {
emit showAudioSignal(QByteArray());
+ qDebug() << "Not emitting audioSamplesSignal.";
}
}
void showAudioSignal(const QByteArray);
/** @brief The renderer refreshed the current frame, but no seeking was done. */
void frameUpdated(QImage);
-    void audioSamplesSignal(const QVector<int16_t> data, const int freq, const int num_channels, const int num_samples);
+    /** Emitted for each audio frame delivered by the renderer: the raw
+        16-bit samples plus sampling frequency, channel count, and the number
+        of samples actually contained in the vector. */
+    void audioSamplesSignal(const QVector<int16_t>&, int freq, int num_channels, int num_samples);
public slots: