From ef98bf722df0d38cee568e86db756543ae6db291 Mon Sep 17 00:00:00 2001
From: Mattk70
Date: Tue, 13 Feb 2024 23:38:22 +0000
Subject: [PATCH 1/2] Bug fixes:

- Normalising audio no longer errors out on silent audio
- Apply defaults to nested config keys
- Changed the timeline default to show time of day
- Hide the context-mode filter panel icon when using BirdNET
- Update sqlite3 to 5.1.7

---
 index.html   |  4 ++--
 js/model.js  | 11 ++++++-----
 js/ui.js     | 24 +++++++++++++++---------
 js/worker.js | 44 ++++++++++++++++++++++----------------------
 package.json |  2 +-
 5 files changed, 46 insertions(+), 39 deletions(-)

diff --git a/index.html b/index.html
index 3f050322..c5b4d467 100644
--- a/index.html
+++ b/index.html
@@ -85,7 +85,7 @@
Quick access settings panel
 Nocmig mode
 Audio filters
-Context-aware mode
+Context-aware mode (Chirpity model only)
 Fullscreen mode for the spectrogram
 Which detection list to use
 And the confidence threshold
@@ -1060,7 +1060,7 @@
             blur_on
-            <span class="material-symbols-outlined btn btn-outline-secondary p-1 pt-2 text-warning">swap_horiz</span>
+            <span class="chirpity-only material-symbols-outlined btn btn-outline-secondary p-1 pt-2 text-warning">swap_horiz</span>
             fullscreen
diff --git a/js/model.js b/js/model.js
--- a/js/model.js
+++ b/js/model.js
 {
     tf.env().set("TOPK_LAST_DIM_CPU_HANDOFF_SIZE_THRESHOLD", 0);
 }
 tf.enableProdMode();
+//tf.enableDebugMode();
 if (DEBUG) {
     console.log(tf.env());
     console.log(tf.env().getFlags());
@@ -292,10 +293,10 @@ class Model {
         const finalPrediction = newPrediction || prediction;
         const { indices, values } = tf.topk(finalPrediction, 5, true)
         // For reasons I don't understand, the Promise.all approach is flakey: on occasion, not all predictions are returned
-        // const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
-        const topIndices = await indices.array();
+        const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
+//const topIndices = await indices.array();
         indices.dispose();
-        const topValues = await values.array();
+//const topValues = await values.array();
         values.dispose();
         finalPrediction.dispose();
@@ -333,7 +334,7 @@ class Model {
         return tf.tidy(() => {
             const sigMax = tf.max(tensor, 1, true);
             const sigMin = tf.min(tensor, 1, true);
-            const normalized = tensor.sub(sigMin).div(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
+            const normalized = tensor.sub(sigMin).divNoNan(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
             return normalized;
         })
     }
@@ -346,7 +347,7 @@ class Model {
         const sigMin = tf.min(signal);
         const range = sigMax.sub(sigMin);
         //return signal.sub(sigMin).div(range).mul(tf.scalar(8192.0, 'float32')).sub(tf.scalar(4095, 'float32'))
-        return signal.sub(sigMin).div(range).mul(tf.scalar(2)).sub(tf.scalar(1))
+        return signal.sub(sigMin).divNoNan(range).mul(tf.scalar(2)).sub(tf.scalar(1))
         })
     };
     async predictChunk(audioBuffer, start, fileStart, file, threshold, confidence) {
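The divNoNan() changes above are the actual fix for silent audio: a silent chunk is constant, so sigMax.sub(sigMin) is zero and plain div() turns every sample into NaN, which then poisons the prediction. divNoNan() defines 0 / 0 as 0 instead. A minimal sketch of the difference, using the @tensorflow/tfjs-node package purely to make the sketch runnable (the buffer shape is illustrative):

    const tf = require('@tensorflow/tfjs-node');

    // Mirrors the patched normalise step above
    const normalise = (tensor) => tf.tidy(() => {
        const sigMax = tf.max(tensor, 1, true);
        const sigMin = tf.min(tensor, 1, true);
        // With div(), a constant (silent) signal gives (x - min) / 0 = NaN everywhere;
        // divNoNan() keeps the result finite
        return tensor.sub(sigMin).divNoNan(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
    });

    const silent = tf.zeros([1, 72000]); // stand-in for a chunk of pure silence
    normalise(silent).data().then(data => console.log(data[0])); // -1, a finite value, not NaN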
diff --git a/js/ui.js b/js/ui.js
index 59c5b34a..51c03944 100644
--- a/js/ui.js
+++ b/js/ui.js
@@ -1355,7 +1355,16 @@ function updatePrefs() {
     }
 }
-
+function fillDefaults(config, defaultConfig) {
+    Object.keys(defaultConfig).forEach(key => {
+        if (!(key in config)) {
+            config[key] = defaultConfig[key];
+        } else if (typeof config[key] === 'object' && typeof defaultConfig[key] === 'object') {
+            // Recursively fill in defaults for nested objects
+            fillDefaults(config[key], defaultConfig[key]);
+        }
+    });
+}
 ///////////////////////// Window Handlers ////////////////////////////
 let appPath, tempPath;
 window.onload = async () => {
@@ -1369,7 +1378,7 @@ window.onload = async () => {
         UUID: uuidv4(),
         locale: 'en_uk',
         colormap: 'inferno',
-        timeOfDay: false,
+        timeOfDay: true,
         list: 'nocturnal',
         local: true,
         speciesThreshold: 0.03,
@@ -1404,11 +1413,8 @@ window.onload = async () => {
     }
     //fill in defaults - after updates add new items
-    Object.keys(defaultConfig).forEach(key => {
-        if (!(key in config)) {
-            config[key] = defaultConfig[key];
-        }
-    });
+    fillDefaults(config, defaultConfig);
+
     // Update model if old models in config
     if (!['chirpity', 'v3', 'v4', 'birdnet'].includes(config.model)) {
         config.model = config.model === 'v2.4' ? 'birdnet' : 'chirpity';
@@ -4053,11 +4059,11 @@ DOM.gain.addEventListener('input', () => {
 
 document.addEventListener('click', function (e) {
-    const target = e.target.closest('[id]').id;
+    const target = e.target.closest('[id]')?.id;
     contextMenu.classList.add("d-none");
     hideConfidenceSlider();
     config.debug && console.log('clicked', target);
-    target !== 'result1' && track('UI', 'Click', target);
+    target && target !== 'result1' && track('UI', 'Click', target);
 })
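For context, the new fillDefaults() recurses into nested objects, so a default introduced by an update (detect.nocmig in the sketch below) is filled in without clobbering nested values the user has already changed, which the old flat Object.keys() loop could not do. A usage sketch, paired with the fillDefaults() defined above; the config shape is invented for illustration:

    const defaultConfig = { timeOfDay: true, detect: { confidence: 45, nocmig: false } };
    const config = { timeOfDay: false, detect: { confidence: 70 } }; // a user's saved settings

    fillDefaults(config, defaultConfig);
    console.log(config);
    // -> { timeOfDay: false, detect: { confidence: 70, nocmig: false } }

One caveat worth noting: typeof null === 'object' in JavaScript, so a null stored on either side would be recursed into and throw; a "!== null" guard would harden the helper.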
diff --git a/js/worker.js b/js/worker.js
index cd775a33..63a034d8 100644
--- a/js/worker.js
+++ b/js/worker.js
@@ -818,7 +818,7 @@ const prepSummaryStatement = (included) => {
 
     filesBeingProcessed = [...FILE_QUEUE];
 
-    for (let i = 0; i < NUM_WORKERS; i++) {
+    for (let i = 0; i < filesBeingProcessed.length; i++) {
         processNextFile({ start: start, end: end, worker: i });
     }
 }
@@ -1314,14 +1314,13 @@ const prepSummaryStatement = (included) => {
             return
         }
         const offlineCtx = await setupCtx(chunk, metadata[file].header);
+        let worker;
         if (offlineCtx) {
             offlineCtx.startRendering().then((resampled) => {
                 const myArray = resampled.getChannelData(0);
-                if (++workerInstance >= NUM_WORKERS) {
-                    workerInstance = 0;
-                }
-                let worker = workerInstance;
+                workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
+                worker = workerInstance;
                 feedChunksToModel(myArray, chunkStart, file, end, worker);
                 chunkStart += WINDOW_SIZE * BATCH_SIZE * sampleRate;
                 // Now the async stuff is done ==>
@@ -1336,12 +1335,9 @@ const prepSummaryStatement = (included) => {
             });
         } else {
             console.log('Short chunk', chunk.length, 'skipping')
-            if (worker === undefined) {
-                if (++workerInstance >= NUM_WORKERS) {
-                    workerInstance = 0;
-                }
-                worker = workerInstance;
-            }
+            workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
+            worker = workerInstance;
+
             // Create array with 0's (short segment of silence that will trigger the finalChunk flag)
             const myArray = new Float32Array(Array.from({length: chunkLength}).fill(0));
             feedChunksToModel(myArray, chunkStart, file, end);
@@ -1350,6 +1346,7 @@ const prepSummaryStatement = (included) => {
     })
     readStream.on('end', function () {
         readStream.close();
+        console.log('All chunks sent for ', file)
     })
     readStream.on('error', err => {
         console.log(`readstream error: ${err}, start: ${start}, , end: ${end}, duration: ${metadata[file].duration}`);
@@ -1438,7 +1435,6 @@ const prepSummaryStatement = (included) => {
         };
         predictWorkers[worker].isAvailable = false;
         predictWorkers[worker].postMessage(objData, [channelData.buffer]);
-
     }
 
     async function doPrediction({
@@ -1794,7 +1790,9 @@ const prepSummaryStatement = (included) => {
                 worker: i
             })
             worker.onmessage = async (e) => {
-                await parseMessage(e)
+                await parseMessage(e).catch(error => {
+                    console.warn("Parse message error", error, 'e was', e)
+                })
             }
             worker.onerror = (e) => {
                 console.warn(`Worker ${i} is suffering, shutting it down. The error was:`, e)
@@ -1966,7 +1964,7 @@ const prepSummaryStatement = (included) => {
     }
     // Remove the trailing comma and space
     insertQuery = insertQuery.slice(0, -2);
-    DEBUG && console.log(insertQuery);
+    //DEBUG && console.log(insertQuery);
     // Make sure we have some values to INSERT
     insertQuery.endsWith(')') && await db.runAsync(insertQuery);
     return fileID
 }
 
 const parsePredictions = async (response) => {
     let file = response.file;
-    const included = await getIncludedIDs(file);
+    const included = await getIncludedIDs(file).catch(error => console.log('Error getting included IDs', error));
     const latestResult = response.result, db = STATE.db;
     DEBUG && console.log('worker being used:', response.worker);
-    if (! STATE.selection) await generateInsertQuery(latestResult, file);
+    if (! STATE.selection) await generateInsertQuery(latestResult, file).catch(error => console.log('Error generating insert query', error));
     let [keysArray, speciesIDBatch, confidenceBatch] = latestResult;
     for (let i = 0; i < keysArray.length; i++) {
         let updateUI = false;
@@ -2003,7 +2001,7 @@ const prepSummaryStatement = (included) => {
             confidenceRequired = STATE.detect.confidence;
         }
         if (confidence >= confidenceRequired) {
-            const { cname } = await memoryDB.getAsync(`SELECT cname FROM species WHERE id = ${speciesID}`);
+            const { cname } = await memoryDB.getAsync(`SELECT cname FROM species WHERE id = ${speciesID}`).catch(error => console.log('Error getting species name', error));
             const result = {
                 timestamp: timestamp,
                 position: key,
@@ -2041,6 +2039,7 @@ const prepSummaryStatement = (included) => {
                 });
             }
         })
+        .catch(error => console.log('Error generating new result', error))
     }
     updateFilesBeingProcessed(response.file)
     console.log(`File ${file} processed after ${(new Date() - predictionStart) / 1000} seconds: ${filesBeingProcessed.length} files to go`);
@@ -2075,9 +2074,10 @@ const prepSummaryStatement = (included) => {
         case "prediction": {
             if ( !aborted) {
                 predictWorkers[response.worker].isAvailable = true;
-                let worker = await parsePredictions(response);
-                DEBUG && console.log('predictions left for', response.file, predictionsReceived[response.file] - predictionsRequested[response.file])
-                if (predictionsReceived[response.file] === predictionsRequested[response.file]) {
+                let worker = await parsePredictions(response).catch(error => console.log('Error parsing predictions', error));
+                console.log('predictions left for', response.file, predictionsReceived[response.file] - predictionsRequested[response.file])
+                const remaining = predictionsReceived[response.file] - predictionsRequested[response.file]
+                if (remaining === 0) {
                     const limit = 10;
                     clearCache(CACHE_LOCATION, limit);
                     if (filesBeingProcessed.length) {
@@ -2271,7 +2271,7 @@ const prepSummaryStatement = (included) => {
 
     const params = getSummaryParams(included);
     const summary = await STATE.GET_SUMMARY_SQL.allAsync(...params);
-    DEBUG && console.log("Get Summary took", (Date.now() - t0) / 1000, "seconds");
+    //DEBUG && console.log("Get Summary took", (Date.now() - t0) / 1000, "seconds");
     const event = interim ? 'update-summary' : 'summary-complate';
     UI.postMessage({
         event: event,
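A note on the "await ... .catch(handler)" pattern adopted throughout the hunks above: when the promise rejects, the handler's return value (undefined, since console.log returns nothing) becomes the awaited result, so execution continues and the surrounding code must tolerate an undefined value such as 'included'. A self-contained illustration with a hypothetical stub standing in for the real call:

    const getIncludedIDsStub = () => Promise.reject(new Error('db not ready')); // stand-in, not the real function

    (async () => {
        const included = await getIncludedIDsStub()
            .catch(error => console.log('Error getting included IDs', error));
        console.log(included); // undefined: the rejection was logged and swallowed
    })();

One consequence worth flagging: the destructured variant above, const { cname } = await ... .catch(...), would still throw on rejection, because undefined cannot be destructured.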
@@ -3038,7 +3038,7 @@ const prepSummaryStatement = (included) => {
         await setIncludedIDs(lat,lon,week);
         hitOrMiss = 'miss';
     }
-    DEBUG && console.log(`Cache ${hitOrMiss}: setting the ${STATE.list} list took ${Date.now() -t0}ms`)
+    //DEBUG && console.log(`Cache ${hitOrMiss}: setting the ${STATE.list} list took ${Date.now() -t0}ms`)
     return STATE.included[STATE.model][STATE.list];
     }
 }
diff --git a/package.json b/package.json
index 1a991194..2daeaa87 100644
--- a/package.json
+++ b/package.json
@@ -203,7 +203,7 @@
     "ffmpeg-static-electron": "^2.0.3",
     "fluent-ffmpeg": "^2.1.2",
     "lodash.merge": "^4.6.2",
-    "sqlite3": "^5.1.6",
+    "sqlite3": "^5.1.7",
     "suncalc": "^1.9.0",
     "utimes": "^5.1.1",
     "uuid": "^8.3.2",

From 590be4b2965692e14f25263a544c26ed578baa36 Mon Sep 17 00:00:00 2001
From: Mattk70
Date: Wed, 14 Feb 2024 17:33:33 +0000
Subject: [PATCH 2/2] Fixed an issue with analysis not completing. Added a
 message queue so as not to overload the database.

---
 Help/settings.html |  2 +-
 js/BirdNet2.4.js   |  3 +--
 js/model.js        |  4 +---
 js/worker.js       | 54 ++++++++++++++++++++++++++++++++++++++--------
 4 files changed, 48 insertions(+), 15 deletions(-)

diff --git a/Help/settings.html b/Help/settings.html
index ea8b4c4b..5e496159 100644
--- a/Help/settings.html
+++ b/Help/settings.html
@@ -111,7 +111,7 @@
    Audio Preferences
-    Gain Adjustment
+    Volume Adjustment
     If your recordings are very quiet, you can increase the loudness of the audio by adding gain. The volume of audio can be increased by up to 50 decibels.
diff --git a/js/BirdNet2.4.js b/js/BirdNet2.4.js
index 58b4bcfd..c25a7324 100644
--- a/js/BirdNet2.4.js
+++ b/js/BirdNet2.4.js
@@ -320,9 +320,8 @@ class Model {
         const finalPrediction = newPrediction || prediction;
         const { indices, values } = tf.topk(finalPrediction, 5, true);
-        const topIndices = indices.arraySync();
+        const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
         indices.dispose();
-        const topValues = values.arraySync();
         values.dispose();
         // end new
         // const array_of_predictions = finalPrediction.arraySync()
diff --git a/js/model.js b/js/model.js
index 76d17c39..4f594782 100644
--- a/js/model.js
+++ b/js/model.js
@@ -292,11 +292,9 @@ class Model {
         const finalPrediction = newPrediction || prediction;
         const { indices, values } = tf.topk(finalPrediction, 5, true)
-        // For reasons I don't understand, the Promise.all approach is flakey: on occasion, not all predictions are returned
+
         const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
-//const topIndices = await indices.array();
         indices.dispose();
-//const topValues = await values.array();
         values.dispose();
         finalPrediction.dispose();
diff --git a/js/worker.js b/js/worker.js
index 63a034d8..1e46ea3b 100644
--- a/js/worker.js
+++ b/js/worker.js
@@ -1765,7 +1765,35 @@ const prepSummaryStatement = (included) => {
         }
     }
-
+    // Create a flag to indicate if parseMessage is currently being executed
+    let isParsing = false;
+
+    // Create a queue to hold messages while parseMessage is executing
+    const messageQueue = [];
+
+    // Function to process the message queue
+    const processQueue = async () => {
+        if (!isParsing && messageQueue.length > 0) {
+            // Set isParsing to true to prevent concurrent executions
+            isParsing = true;
+
+            // Get the first message from the queue
+            const message = messageQueue.shift();
+
+            // Parse the message
+            await parseMessage(message).catch(error => {
+                console.warn("Parse message error", error, 'message was', message);
+            });
+
+            // Set isParsing to false to allow the next message to be processed
+            isParsing = false;
+
+            // Process the next message in the queue
+            processQueue();
+        }
+    };
+
     /// Workers From the MDN example5
     function spawnPredictWorkers(model, list, batchSize, threads) {
         NUM_WORKERS = threads;
@@ -1789,11 +1817,16 @@ const prepSummaryStatement = (included) => {
                 threshold: STATE.speciesThreshold,
                 worker: i
             })
-            worker.onmessage = async (e) => {
-                await parseMessage(e).catch(error => {
-                    console.warn("Parse message error", error, 'e was', e)
-                })
-            }
+
+            // Web worker message event handler
+            worker.onmessage = (e) => {
+                // Push the message to the queue
+                messageQueue.push(e);
+                // if the message queue is getting too long, ease back on the calls to update summary?
+
+                // Process the queue
+                processQueue();
+            };
             worker.onerror = (e) => {
                 console.warn(`Worker ${i} is suffering, shutting it down. The error was:`, e)
                 predictWorkers.splice(i, 1);
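This queue is the heart of the second commit: onmessage now only enqueues, and processQueue() drains strictly one message at a time, with isParsing acting as a re-entrancy latch, so a burst of predictions from several workers can no longer trigger overlapping database work. A stripped-down, runnable sketch of the same pattern; handleResult is a stand-in for parseMessage:

    let isParsing = false;
    const messageQueue = [];

    const handleResult = async (message) => {
        await new Promise(resolve => setTimeout(resolve, 10)); // simulated database write
        console.log('processed', message);
    };

    const processQueue = async () => {
        if (!isParsing && messageQueue.length > 0) {
            isParsing = true; // latch: block concurrent drains
            const message = messageQueue.shift();
            await handleResult(message).catch(error => console.warn('error', error));
            isParsing = false;
            processQueue(); // keep draining until the queue is empty
        }
    };

    // A burst of arrivals is still handled strictly in order: a, b, c
    ['a', 'b', 'c'].forEach(message => { messageQueue.push(message); processQueue(); });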
@@ -1930,7 +1963,8 @@ const prepSummaryStatement = (included) => {
     }
 
     const generateInsertQuery = async (latestResult, file) => {
-        const db = STATE.db;
+        const db = STATE.db;
+        await db.runAsync('BEGIN');
         let insertQuery = 'INSERT OR IGNORE INTO records VALUES ';
         let fileID, changes;
         let res = await db.getAsync('SELECT id FROM files WHERE name = ?', file);
@@ -1966,7 +2000,9 @@ const prepSummaryStatement = (included) => {
     }
     // Remove the trailing comma and space
     insertQuery = insertQuery.slice(0, -2);
     //DEBUG && console.log(insertQuery);
     // Make sure we have some values to INSERT
-    insertQuery.endsWith(')') && await db.runAsync(insertQuery);
+    insertQuery.endsWith(')') && await db.runAsync(insertQuery)
+        .catch(error => console.log("Database error:", error))
+    await db.runAsync('END');
     return fileID
 }
 
@@ -2011,7 +2047,7 @@ const prepSummaryStatement = (included) => {
                 score: confidence
             }
             sendResult(++index, result, false);
-            // Only show the highest confidence detection, unless it's a slection analysis
+            // Only show the highest confidence detection, unless it's a selection analysis
             if (! STATE.selection) break;
         };
     }
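Finally, the BEGIN ... END pair above turns each batch of inserts into a single transaction. In SQLite's default autocommit mode every INSERT commits (and syncs to disk) individually, which is what let large result sets overwhelm the database; one transaction per batch means one commit. A sketch of the idea using the promisified runAsync style the worker uses elsewhere; the table layout and values are illustrative, not the real schema:

    const insertBatch = async (db, records) => {
        await db.runAsync('BEGIN');
        for (const [dateTime, position, fileID, speciesID, confidence] of records) {
            await db.runAsync(
                'INSERT OR IGNORE INTO records VALUES (?, ?, ?, ?, ?)',
                dateTime, position, fileID, speciesID, confidence
            ).catch(error => console.log('Database error:', error));
        }
        // END is SQLite's synonym for COMMIT: one disk sync for the whole batch
        await db.runAsync('END');
    };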