Commit 34138ffe authored by Elliu

First quick take at improving spectrum visibility

Will need to connect properties and factors to sliders to easily test
what works best
parent e52d857a
Pipeline #4226 passed
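The commit message above mentions connecting the adjustment factors to sliders. A minimal sketch of what that could look like with plain Qt widgets follows; the SpectrumTuningPanel class, its members and its ranges are assumptions for illustration only and are not part of this commit.

    #include <QSlider>
    #include <QVBoxLayout>
    #include <QWidget>

    // Hypothetical tuning panel, only to illustrate the idea from the commit
    // message: two sliders driving the exponent and the multiplier used by
    // the spectrum colour mapping. None of these names exist in the Vivy
    // code base.
    class SpectrumTuningPanel : public QWidget {
    public:
        explicit SpectrumTuningPanel(QWidget *parent = nullptr)
            : QWidget(parent)
            , gammaSlider(new QSlider(Qt::Horizontal, this))
            , multiplierSlider(new QSlider(Qt::Horizontal, this))
        {
            gammaSlider->setRange(1, 100);     // read back as 0.01 .. 1.00
            multiplierSlider->setRange(1, 10); // read back as 1 .. 10

            auto *layout = new QVBoxLayout(this);
            layout->addWidget(gammaSlider);
            layout->addWidget(multiplierSlider);

            connect(gammaSlider, &QSlider::valueChanged, this,
                    [this](int value) { gamma = value / 100.0; });
            connect(multiplierSlider, &QSlider::valueChanged, this,
                    [this](int value) { multiplier = value; });
        }

        double gamma = 0.3; // defaults taken from the values hard-coded in this commit
        int multiplier = 4;

    private:
        QSlider *gammaSlider;
        QSlider *multiplierSlider;
    };

The visualizer would then re-run the colour-mapping pass whenever one of these values changes, instead of rebuilding with a different PIXMULTIPLIER.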
@@ -2,7 +2,7 @@
 using namespace Vivy;

-#define MAXPIXVALUE 7 // Some magix AV magic stuff
+#define PIXMULTIPLIER 4 // Some magic AV magic stuff

 AudioVisualizer::AudioVisualizer(AudioContext::StreamPtr stream, VivyDocumentView &rootView,
                                  QWidget *parent)
@@ -19,10 +19,11 @@ AudioVisualizer::AudioVisualizer(AudioContext::StreamPtr stream, VivyDocumentView
     if (decodedData == nullptr)
         throw std::logic_error("the passed stream is not decoded");

     const size_t size = audioStream->getDecodedDataSize();
     const size_t height = audioStream->getDecodedChunkSize();
+    const size_t halfHeight = size_t(height / 2);
     const size_t decalage = audioStream->getDecodedDecalage();
     const size_t width = (size - height) / decalage;

     uchar *pixels =
         new uchar[static_cast<size_t>(width * height / 2 + (size - height) / decalage)]();
@@ -56,13 +57,31 @@ AudioVisualizer::AudioVisualizer(AudioContext::StreamPtr stream, VivyDocumentView
             const float re = chunkData[j * 2 + 1];
             const float mag = sqrtf(im * im + re * re);
             const size_t index = static_cast<size_t>(j * static_cast<ulong>(width) + x);
-            pixels[index] = static_cast<unsigned char>((mag)*MAXPIXVALUE);
+            pixels[index] = static_cast<unsigned char>(mag);
         }
     }

-    QImage img = QImage(pixels, static_cast<int>(width), static_cast<int>(height / 2),
-                        static_cast<int>(width), QImage::Format_Grayscale8, pixelsDeleter, pixels)
-                     .mirrored(false, true);
+    QImage img = QImage(int(width), int(halfHeight), QImage::Format_RGB888);
+    parallel_for (int y = 0; y < int(halfHeight); y++) {
+        int tempLine = y * int(width);
+        parallel_for (int x = 0; x < int(width); x++) {
+            uint pixValue = pixels[tempLine + x];
+            // TODO: See if we could make this a function provided by the user if wanted,
+            //       to adapt to different spectrums
+            uint pixValueAdjusted = uint(PIXMULTIPLIER * double(std::pow(pixValue / 255.0, 0.3)) * 255);
+            uchar r, g, b;
+            b = uchar(std::min<uint>(255, pixValueAdjusted));
+            if (Q_UNLIKELY(b == 255)) {
+                g = std::max<uchar>(0, uchar(std::min<uint>(255, pixValueAdjusted - 255)));
+                r = std::max<uchar>(0, uchar(std::min<uint>(255, pixValueAdjusted - 510)));
+            } else {
+                g = r = 0;
+            }
+            img.setPixel(x, y, qRgb(r, g, b));
+        }
+    }
+
+    img = img.mirrored(false, true);

     printSpectrum(img, audioStream);
 }
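The TODO in the loop above suggests turning the magnitude-to-colour mapping into a function the user could provide. A hedged sketch of how it could be factored out is shown below; mapMagnitudeToColor and its parameters are illustrative only, not part of this commit. It keeps the same idea as the committed loop, a gamma curve (value / 255)^0.3 scaled by the multiplier and then a blue-to-green-to-red ramp, but guards the subtractions so they cannot wrap around on unsigned arithmetic.

    #include <QColor>
    #include <algorithm>
    #include <cmath>

    // Illustrative helper, not part of the commit: same gamma curve and
    // blue -> green -> red ramp as the inlined loop, factored out so it could
    // later be replaced by a user-provided callable.
    static QRgb mapMagnitudeToColor(uchar pixValue, double multiplier = 4.0, double gamma = 0.3)
    {
        const uint adjusted = uint(multiplier * std::pow(pixValue / 255.0, gamma) * 255);

        const uchar b = uchar(std::min<uint>(255, adjusted));
        uchar g = 0, r = 0;
        if (b == 255) {
            // Once blue saturates, spill the remainder into green, then red,
            // guarding the subtractions against unsigned wrap-around.
            g = uchar(std::min<uint>(255, adjusted - 255));
            r = adjusted > 510 ? uchar(std::min<uint>(255, adjusted - 510)) : 0;
        }
        return qRgb(r, g, b);
    }

With such a helper the inner loop body reduces to img.setPixel(x, y, mapMagnitudeToColor(pixels[tempLine + x])), and the multiplier and gamma parameters are the obvious values to expose through the sliders mentioned in the commit message.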
@@ -88,3 +107,5 @@ AudioVisualizer::printSpectrum(QImage pixmap, AudioContext::StreamPtr stream) noexcept
     connect(&rootVivyDocumentView, &VivyDocumentView::assSubDocumentChanged, timingScene,
             &TimingScene::rebuildScene);
 }
+
+#undef PIXMULTIPLIER
@@ -29,10 +29,6 @@ private:
         if (ptr)
             av_rdft_end(ptr);
     };
-    static constexpr inline auto pixelsDeleter = [](void *ptr) noexcept -> void {
-        if (ptr)
-            delete[](reinterpret_cast<uchar *>(ptr));
-    };

     using FFTSamplePtr = std::unique_ptr<FFTSample[], decltype(fftSampleDeleter)>;
     using RDFTContextPtr = std::unique_ptr<RDFTContext, decltype(rdftContextDeleter)>;
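With pixelsDeleter removed from the header and the QImage no longer constructed over the raw buffer, the pixels array allocated with new[] in the constructor is no longer released by the image. A possible follow-up, sketched below and not part of this commit, would be to hold the buffer in a std::unique_ptr so it is freed once the colour-mapped image has been built.

    #include <cstddef>
    #include <memory>

    // Illustrative only: the spectrum buffer owned by a unique_ptr instead of
    // a raw new[] handed over to QImage. The FFT and colour-mapping loops
    // would write to / read from pixels exactly as before.
    void buildSpectrum(std::size_t width, std::size_t height,
                       std::size_t size, std::size_t decalage)
    {
        auto pixels = std::make_unique<unsigned char[]>(
            width * height / 2 + (size - height) / decalage);

        // ... fill pixels[index] from the FFT, then build the QImage from it ...

        // No explicit delete[]: the buffer is released when pixels goes out of scope.
    }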