Commit 316393b9 authored by Leander Schulten

Merge branch 'master' into feature/rtAudio

parents d3913136 f49e117a
Pipeline #190384 passed in 5 minutes and 38 seconds
......@@ -11,10 +11,13 @@ AudioCaptureManager::AudioCaptureManager():audiofft(sample.size())
}
void AudioCaptureManager::initCallback(int channels){
void AudioCaptureManager::initCallback(int channels, int samplesPerSecond) {
this->channels = channels;
if(GUI::Colorplot::getLast())
this->samplesPerSecond = samplesPerSecond;
this->samplesPerFrame = samplesPerSecond / 100;
if (GUI::Colorplot::getLast()) {
GUI::Colorplot::getLast()->setBlockSize(512);
}
}
void AudioCaptureManager::dataCallback(float* data, unsigned int frames, bool*done){
......@@ -39,13 +42,17 @@ void AudioCaptureManager::dataCallback(float* data, unsigned int frames, bool*do
{
// feed the *analysis classes with new samples
unsigned restFrames = frames;
if (restFrames % 441 != 0) {
ErrorNotifier::showError(QStringLiteral("The samples from the audio capture service does not have a length of 441 or x * 441. Can not analyse audio data."));
if (restFrames % samplesPerFrame != 0) {
static bool once = false;
if (!once) {
once = true;
ErrorNotifier::showError(QStringLiteral("The samples from the audio capture service do not have a length of %1 or a multiple of %1. The length is %2. Cannot analyse audio data.").arg(samplesPerFrame).arg(frames));
}
} else {
while (restFrames != 0) {
if (restFrames >= sample.size()) {
// we have to ignore some data
restFrames -= 441;
restFrames -= samplesPerFrame;
continue;
}
for (auto &[onsetFunction, pair] : onsetAnalyzes) {
......@@ -54,16 +61,16 @@ void AudioCaptureManager::dataCallback(float* data, unsigned int frames, bool*do
if (wasOnset) {
pair.second.addEvent(pair.first.getLastOnset());
}
pair.second.increaseNewestSampleBy(441);
pair.second.increaseNewestSampleBy(samplesPerFrame);
}
for (auto &[onsetFunction, pair] : tempoAnalyzes) {
bool wasBeat = pair.first.processNewSamples(sample.data() + sample.size() - restFrames);
if (wasBeat) {
pair.second.addEvent(pair.first.getLastBeat());
}
pair.second.increaseNewestSampleBy(441);
pair.second.increaseNewestSampleBy(samplesPerFrame);
}
restFrames -= 441;
restFrames -= samplesPerFrame;
}
}
}
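
The loop above hands the capture buffer to the analysis objects in hops of samplesPerFrame (samplesPerSecond / 100, i.e. 10 ms of audio) and rejects buffers whose length is not a multiple of the hop size. A simplified, standalone sketch of that chunking; the Analyzer type and the feedAnalyzer helper are hypothetical and not part of the project:

```cpp
#include <cstddef>

// Hypothetical stand-in for the project's onset/tempo analyzers; not the real API.
struct Analyzer {
    void process(const float *hop, std::size_t hopSize) { /* run detection on one hop */ }
};

// Feeds `frames` samples to the analyzer in hops of sampleRate / 100 samples (10 ms),
// mirroring the chunking in dataCallback above in simplified form. Returns false when
// the buffer length is not a multiple of the hop size (the error path in the diff).
bool feedAnalyzer(Analyzer &analyzer, const float *data, std::size_t frames, int sampleRate) {
    const std::size_t hop = static_cast<std::size_t>(sampleRate / 100);
    if (hop == 0 || frames % hop != 0) {
        return false;
    }
    for (std::size_t offset = 0; offset < frames; offset += hop) {
        analyzer.process(data + offset, hop);
    }
    return true;
}
```
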
......@@ -86,7 +93,7 @@ void AudioCaptureManager::dataCallback(float* data, unsigned int frames, bool*do
bool AudioCaptureManager::startCapturing(QString filePathToCaptureLibrary){
stopCapturingAndWait();
typedef int (*capture)(void(*)(int),void(*)(float*,unsigned int, bool*)) ;
typedef int (*capture)(void (*)(int, int), void (*)(float *, unsigned int, bool *));
auto func = reinterpret_cast<capture>(QLibrary::resolve(filePathToCaptureLibrary,"captureAudio"));
if(func){
captureAudioThread = std::thread([this,func](){
......@@ -100,23 +107,29 @@ bool AudioCaptureManager::startCapturing(QString filePathToCaptureLibrary){
return func;
}
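
The hunk above widens the init callback from void(*)(int) to void(*)(int, int), so the capture typedef that the resolved plugin entry point is cast to must change as well; reinterpret_cast will silently hide any mismatch. A small sketch of the resolve-and-cast pattern, assuming Qt is available; startPlugin and the type aliases are illustrative names, only the "captureAudio" symbol comes from the diff:

```cpp
#include <QLibrary>
#include <QString>

// Callback signatures expected from the capture plugin (matching the new typedef above).
using InitCallback = void (*)(int channels, int samplesPerSecond);
using DataCallback = void (*)(float *data, unsigned int frames, bool *done);
using CaptureEntry = int (*)(InitCallback, DataCallback);

// Resolves the "captureAudio" symbol from the given library and runs it with the
// supplied callbacks. Returns -1 if the symbol cannot be resolved.
int startPlugin(const QString &libraryPath, InitCallback onInit, DataCallback onData) {
    auto entry = reinterpret_cast<CaptureEntry>(QLibrary::resolve(libraryPath, "captureAudio"));
    if (!entry) {
        return -1;
    }
    return entry(onInit, onData);
}
```
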
const EventSeries &AudioCaptureManager::requestTempoAnalysis(Aubio::OnsetDetectionFunction f) {
const EventSeries *AudioCaptureManager::requestTempoAnalysis(Aubio::OnsetDetectionFunction f) {
if (samplesPerSecond < 0) {
return nullptr;
}
// check if already there
if (const auto i = tempoAnalyzes.find(f); i != tempoAnalyzes.end()) {
return i->second.second;
return &i->second.second;
}
// We need this ugly syntax because we cannot copy or move an EventRange object. See https://stackoverflow.com/a/25767752/10162645
return tempoAnalyzes.emplace(std::piecewise_construct, std::make_tuple(f), std::forward_as_tuple(std::piecewise_construct, std::forward_as_tuple(f, 1024, 441, 44100), std::forward_as_tuple(44100))).first->second.second;
return &tempoAnalyzes.emplace(std::piecewise_construct, std::make_tuple(f), std::forward_as_tuple(std::piecewise_construct, std::forward_as_tuple(f, 1024, samplesPerFrame, samplesPerSecond), std::forward_as_tuple(samplesPerSecond))).first->second.second;
// short: tempoAnalyzes.emplace(f, {Aubio::TempoAnalysis(f, 1024, 441, 44100), OnsetDataSeries(44100)});
}
const OnsetDataSeries &AudioCaptureManager::requestOnsetAnalysis(Aubio::OnsetDetectionFunction f) {
const OnsetDataSeries *AudioCaptureManager::requestOnsetAnalysis(Aubio::OnsetDetectionFunction f) {
if (samplesPerSecond < 0) {
return nullptr;
}
// check if already there
if (const auto i = onsetAnalyzes.find(f); i != onsetAnalyzes.end()) {
return i->second.second;
return &i->second.second;
}
// We need this ugly syntax because we cannot copy or move an EventRange object. See https://stackoverflow.com/a/25767752/10162645
return onsetAnalyzes.emplace(std::piecewise_construct, std::make_tuple(f), std::forward_as_tuple(std::piecewise_construct, std::forward_as_tuple(f, 1024, 441, 44100), std::forward_as_tuple(44100))).first->second.second;
return &onsetAnalyzes.emplace(std::piecewise_construct, std::make_tuple(f), std::forward_as_tuple(std::piecewise_construct, std::forward_as_tuple(f, 1024, samplesPerFrame, samplesPerSecond), std::forward_as_tuple(samplesPerSecond))).first->second.second;
// short: onsetAnalyzes.emplace(f, {Aubio::OnsetAnalysis(f, 1024, 441, 44100), OnsetDataSeries(44100)});
}
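
Both request functions rely on emplace with std::piecewise_construct because the mapped pair holds objects that can be neither copied nor moved (see the Stack Overflow link in the comments). A compilable sketch of that pattern, using hypothetical Analysis and Series types in place of the real Aubio classes:

```cpp
#include <map>
#include <mutex>
#include <tuple>
#include <utility>

// Stand-ins for the analysis and data-series types: the std::mutex member makes
// them neither copyable nor movable, like the real classes.
struct Analysis {
    Analysis(int hopSize, int sampleRate) : hopSize(hopSize), sampleRate(sampleRate) {}
    int hopSize;
    int sampleRate;
    std::mutex mutex;
};
struct Series {
    explicit Series(int sampleRate) : sampleRate(sampleRate) {}
    int sampleRate;
    std::mutex mutex;
};

int main() {
    std::map<int, std::pair<Analysis, Series>> analyzes;
    const int key = 0, hop = 441, rate = 44100;
    // Construct the key and both pair members in place; nothing is copied or moved.
    analyzes.emplace(std::piecewise_construct,
                     std::forward_as_tuple(key),
                     std::forward_as_tuple(std::piecewise_construct,
                                           std::forward_as_tuple(hop, rate),
                                           std::forward_as_tuple(rate)));
    return 0;
}
```
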
......
......@@ -36,6 +36,8 @@ class AudioCaptureManager : public QObject
std::atomic_bool run;
AudioFFT audiofft;
int channels = -1;
int samplesPerSecond = -1;
int samplesPerFrame = -1;
/**
* @brief tempoAnalyzes all tempo analyses that were requested by requestTempoAnalysis
*/
......@@ -54,9 +56,9 @@ private:
}
}
private:
static void staticInitCallback(int channels){get().initCallback(channels);}
static void staticInitCallback(int channels, int samplesPerSecond) { get().initCallback(channels, samplesPerSecond); }
static void staticDataCallback(float* data, unsigned int frames, bool*done){get().dataCallback(data,frames,done);}
void initCallback(int channels);
void initCallback(int channels, int samplesPerSecond);
void dataCallback(float* data, unsigned int frames, bool*done);
public:
bool startCapturing(QString filePathToCaptureLibrary);
......@@ -70,14 +72,14 @@ public:
* @param f the onset function that should be used
* @return the Event Series produced by the analysis object using the specific onset detection function, or nullptr if audio capturing has not been initialized yet
*/
const EventSeries &requestTempoAnalysis(Aubio::OnsetDetectionFunction f);
const EventSeries *requestTempoAnalysis(Aubio::OnsetDetectionFunction f);
/**
* @brief requestOnsetAnalysis requests the data series from an onset analysis that uses a specific onset detection function
* You can call the function with the same parameters multiple times; the result will be the same
* @param f the onset function that should be used
* @return the Onset Data Series produced by the analysis object using the specific onset detection function, or nullptr if audio capturing has not been initialized yet
*/
const OnsetDataSeries &requestOnsetAnalysis(Aubio::OnsetDetectionFunction f);
const OnsetDataSeries *requestOnsetAnalysis(Aubio::OnsetDetectionFunction f);
public:
AudioCaptureManager(AudioCaptureManager const&) = delete;
......
......@@ -8,7 +8,9 @@ using namespace Audio::Aubio;
namespace GUI {
float AudioEventDataView::getX(const Audio::EventSeries &e, int sample) { return static_cast<float>(width()) - (static_cast<float>(e.getNewestSample()) - sample) / (e.getSamplesPerSecond() / pixelPerSecond); }
float AudioEventDataView::getX(const Audio::EventSeries *e, int sample) {
return static_cast<float>(width()) - (static_cast<float>(e->getNewestSample()) - sample) / (e->getSamplesPerSecond() / pixelPerSecond);
}
AudioEventDataView::AudioEventDataView(QQuickItem *parent) : QQuickItem(parent) {
setFlag(ItemHasContents);
......@@ -23,18 +25,26 @@ AudioEventDataView::AudioEventDataView(QQuickItem *parent) : QQuickItem(parent)
}
void AudioEventDataView::enableDetectionFor(OnsetDetectionFunction f, AudioEventDataView::DataType type, bool enabled) {
colors[to_integral(f)][type].first = enabled;
if (enabled) {
if (type == BeatEvent) {
if (beatData.find(f) == beatData.end()) {
beatData.emplace(f, Audio::AudioCaptureManager::get().requestTempoAnalysis(f));
auto p = Audio::AudioCaptureManager::get().requestTempoAnalysis(f);
if (!p) {
return;
}
beatData.emplace(f, p);
}
} else {
if (onsetData.find(f) == onsetData.end()) {
onsetData.emplace(f, Audio::AudioCaptureManager::get().requestOnsetAnalysis(f));
auto p = Audio::AudioCaptureManager::get().requestOnsetAnalysis(f);
if (!p) {
return;
}
onsetData.emplace(f, p);
}
}
}
colors[to_integral(f)][type].first = enabled;
}
bool AudioEventDataView::isDetectionEnabledFor(OnsetDetectionFunction onsetDetectionFunction, AudioEventDataView::DataType type) { return colors[to_integral(onsetDetectionFunction)][type].first; }
......@@ -90,7 +100,7 @@ QSGNode *AudioEventDataView::updatePaintNode(QSGNode *node, QQuickItem::UpdatePa
return gNode->geometry();
};
const auto fillEvents = [this](auto geometry, const auto &data) {
auto events = data.getEvents();
auto events = data->getEvents();
geometry->allocate(events->size() * 2);
auto vertexData = geometry->vertexDataAsPoint2D();
for (const auto &e : *events) {
......@@ -107,24 +117,24 @@ QSGNode *AudioEventDataView::updatePaintNode(QSGNode *node, QQuickItem::UpdatePa
for (auto &[f, data] : onsetData) {
if (isDetectionEnabledFor(f, OnsetValue)) {
QSGGeometry *geometry = getGeometry(getColor(f, OnsetValue));
const auto lockedData = data.getOnsetData();
const auto lockedData = data->getOnsetData();
geometry->allocate(lockedData->size());
auto vertexData = geometry->vertexDataAsPoint2D();
for (const auto &o : *lockedData) {
vertexData->x = getX(data, o.sample);
vertexData->y = height() - ((o.onsetValue / data.getMaxOnsetValue()) * (height() - 50));
vertexData->y = height() - ((o.onsetValue / data->getMaxOnsetValue()) * (height() - 50));
++vertexData;
}
geometry->setDrawingMode(QSGGeometry::DrawLineStrip);
}
if (isDetectionEnabledFor(f, ThresholdValue)) {
QSGGeometry *geometry = getGeometry(getColor(f, ThresholdValue));
const auto lockedData = data.getOnsetData();
const auto lockedData = data->getOnsetData();
geometry->allocate(lockedData->size());
auto vertexData = geometry->vertexDataAsPoint2D();
for (const auto &o : *lockedData) {
vertexData->x = getX(data, o.currentThreshold);
vertexData->y = height() - ((o.onsetValue / data.getMaxThreshold()) * (height() - 50));
vertexData->y = height() - ((o.onsetValue / data->getMaxThreshold()) * (height() - 50));
++vertexData;
}
geometry->setDrawingMode(QSGGeometry::DrawLineStrip);
......
......@@ -10,13 +10,13 @@ namespace GUI {
class AudioEventDataView : public QQuickItem {
Q_OBJECT
std::map<enum Audio::Aubio::OnsetDetectionFunction, const Audio::OnsetDataSeries &> onsetData;
std::map<enum Audio::Aubio::OnsetDetectionFunction, const Audio::EventSeries &> beatData;
std::map<enum Audio::Aubio::OnsetDetectionFunction, const Audio::OnsetDataSeries *> onsetData;
std::map<enum Audio::Aubio::OnsetDetectionFunction, const Audio::EventSeries *> beatData;
Q_PROPERTY(bool visibleForUser MEMBER visibleForUser NOTIFY visibleForUserChanged)
Q_PROPERTY(int pixelPerSecond MEMBER pixelPerSecond NOTIFY pixelPerSecondChanged)
int pixelPerSecond = 100;
bool visibleForUser = true;
float getX(const Audio::EventSeries &e, int sample);
float getX(const Audio::EventSeries *e, int sample);
public:
enum DataType { BeatEvent, OnsetEvent, OnsetValue, ThresholdValue, Last = ThresholdValue };
......
......@@ -68,7 +68,7 @@ Item{
x: parent.contentItem.x + 10
text: modelData.name
width: Math.max(implicitWidth+20,70)
enabled: (UserManagment.currentUser.havePermission(Permission.Admin) || modelData === UserManagment.currentUser)&&modelData!==UserManagment.getDefaultUser()
enabled: (UserManagment.currentUser.havePermission(Permission.Admin) || modelData === UserManagment.currentUser)&&modelData!==UserManagment.defaultUser
}
Button{
visible: UserManagment.currentUser.havePermission(Permission.Admin);
......@@ -100,7 +100,7 @@ Item{
anchors.bottom: parent.bottom
anchors.rightMargin: 5
onClicked: {
if(modelData===UserManagment.getDefaultUser()){
if(modelData===UserManagment.defaultUser){
UserManagment.logout();
}else if(modelData===UserManagment.currentUser){
UserManagment.logout()
......
......@@ -346,34 +346,6 @@ Item{
Layout.fillWidth: true
onHoveredChanged: if(!hovered && listView.currentModelData)listView.currentModelData.code = codeEditor.text
clip: true
Rectangle{
anchors.fill: codeEditor
anchors.topMargin: codeEditor.topPadding
TextMetrics{
font: codeEditor.font
text: "M"
id: textMetrics
}
Repeater{
model: codeEditorHelper.codeMarkups
Rectangle{
x: modelData.column * (textMetrics.width+1)
y: modelData.row * height
width: modelData.markupLength * (textMetrics.width+1)
height: codeEditor.lineHeight
color: modelData.error ? "red" : "orange"
MouseArea{
anchors.fill: parent
id: mouseArea
acceptedButtons: Qt.NoButton
hoverEnabled: true
}
ToolTip.text: modelData.message
ToolTip.visible: mouseArea.containsMouse
}
}
}
TextArea{
property real lineHeight: contentHeight/lineCount
font.family: "Liberation Mono"
......@@ -563,6 +535,35 @@ Item{
timer.start();
}
}
} // TextArea
// Must be behind TextArea because of https://bugreports.qt.io/browse/QTBUG-62292
Rectangle{
anchors.fill: codeEditor
anchors.topMargin: codeEditor.topPadding
TextMetrics{
font: codeEditor.font
text: "M"
id: textMetrics
}
Repeater{
model: codeEditorHelper.codeMarkups
Rectangle{
x: modelData.column * (textMetrics.width+1)
y: modelData.row * height
width: modelData.markupLength * (textMetrics.width+1)
height: codeEditor.lineHeight
color: modelData.error ? "red" : "orange"
MouseArea{
anchors.fill: parent
id: mouseArea
acceptedButtons: Qt.NoButton
hoverEnabled: true
}
ToolTip.text: modelData.message
ToolTip.visible: mouseArea.containsMouse
}
}
}
}
Button{
......
......@@ -2,7 +2,7 @@
#include <QCryptographicHash>
#include <QJsonArray>
UserManagment::UserManagment():readUser(new User("Default","")),currentUser(readUser){
UserManagment::UserManagment() : defaultUser(new User(QStringLiteral("Default"), "")), currentUser(defaultUser.get()) {
/*auto pass = QCryptographicHash::hash(QString("12345").toUtf8(),QCryptographicHash::Sha3_256);
qDebug()<<pass;
auto admin = new User("Admin",pass);
......@@ -121,12 +121,12 @@ void UserManagment::autoLoginUser(){
void UserManagment::logout(User *user){
if(currentUser==user){
currentUser = readUser;
currentUser = getDefaultUser();
emit currentUserChanged();
}
}
void UserManagment::logout(){
currentUser = readUser;
currentUser = getDefaultUser();
emit currentUserChanged();
}
......
......@@ -18,11 +18,12 @@ class UserManagment : public QObject
{
Q_OBJECT
Q_PROPERTY(User* currentUser READ getCurrentUser NOTIFY currentUserChanged)
Q_PROPERTY(User* defaultUser READ getCurrentUser CONSTANT)
Q_PROPERTY(User *defaultUser READ getDefaultUser CONSTANT)
Q_PROPERTY(QAbstractItemModel * users READ getUserModel CONSTANT)
Q_PROPERTY(QString currentOsUserName READ getCurrentOsUserName CONSTANT)
private:
User * readUser;
// User is an incomplete type here
std::unique_ptr<User> defaultUser;
User * currentUser;
QString currentOsUserName;
ModelVector<std::unique_ptr<User>> users;
......@@ -36,7 +37,7 @@ public:
User * getUserById(ID id){return getUserById(id.value());}
User * getUserById(ID::value_type id);
Q_INVOKABLE User * getDefaultUser()const{return readUser;}
[[nodiscard]] User *getDefaultUser() const { return defaultUser.get(); }
/**
* @brief get Return the Singletone of the UserManagment
* @return
......
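
The header above replaces the raw User* readUser member with std::unique_ptr<User> defaultUser while User is still only forward-declared (hence the "incomplete type" comment). That is valid as long as every member that destroys the pointee, typically the destructor, is defined in a translation unit where User is complete. A self-contained sketch of the idiom, with hypothetical Manager and Widget types:

```cpp
#include <memory>

class Widget; // forward declaration only; Widget is incomplete here

class Manager {
public:
    Manager();
    ~Manager(); // declared here, defined below where Widget is complete
private:
    std::unique_ptr<Widget> widget; // fine with an incomplete type
};

// Normally this part lives in the .cpp file, where Widget's full definition is visible.
class Widget {
public:
    int value = 0;
};

Manager::Manager() : widget(std::make_unique<Widget>()) {}
Manager::~Manager() = default; // safe: Widget is complete at this point

int main() {
    Manager m; // constructing and destroying the Widget works as expected
    return 0;
}
```
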