From ef98bf722df0d38cee568e86db756543ae6db291 Mon Sep 17 00:00:00 2001
From: Mattk70
Date: Tue, 13 Feb 2024 23:38:22 +0000
Subject: [PATCH] Bug fixes:

Normalising audio no longer errors out on silent audio
Apply defaults to nested config keys, not just top-level ones
Change the timeline default to show time of day
Hide the context-mode filter panel icon when using BirdNET
Update sqlite3 to 5.1.7

---
 index.html   |  4 ++--
 js/model.js  | 11 ++++++-----
 js/ui.js     | 24 +++++++++++++++---------
 js/worker.js | 44 ++++++++++++++++++++++----------------------
 package.json |  2 +-
 5 files changed, 46 insertions(+), 39 deletions(-)
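Note on the silent-audio fix in js/model.js below: normalising a constant (all-zero) chunk computes a range of max - min = 0, and tf.div() turns the resulting 0/0 into NaN, which then errors out downstream. tf.divNoNan() returns 0 wherever the divisor is 0. A minimal standalone sketch of the difference (assumes @tensorflow/tfjs; not code from this repo):

```js
const tf = require('@tensorflow/tfjs');

const silent = tf.zeros([1, 4]);                  // a chunk of pure silence
const range = tf.max(silent).sub(tf.min(silent)); // 0 for any constant signal

silent.sub(tf.min(silent)).div(range).print();      // [[NaN, NaN, NaN, NaN]]
silent.sub(tf.min(silent)).divNoNan(range).print(); // [[0, 0, 0, 0]]
```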
diff --git a/index.html b/index.html
index 3f050322..c5b4d467 100644
--- a/index.html
+++ b/index.html
@@ -85,7 +85,7 @@
     Quick access settings panel
     1. Nocmig mode
     2. Audio filters
-    3. Context-aware mode
+    3. Context-aware mode (Chirpity model only)
     4. Fullscreen mode for the spectrogram
     5. Which detection list to use
     6. And the confidence threshold
@@ -1060,7 +1060,7 @@
     blur_on
-    class="material-symbols-outlined btn btn-outline-secondary p-1 pt-2 text-warning">swap_horiz
+    class="chirpity-only material-symbols-outlined btn btn-outline-secondary p-1 pt-2 text-warning">swap_horiz
     fullscreen
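The "chirpity-only" class added above tags the context-aware-mode button so it can be hidden whenever the BirdNET model is selected, per the commit message. The toggle itself is not part of this patch; a hypothetical sketch of the pattern, reusing the Bootstrap d-none utility class the app already uses elsewhere (the helper name is illustrative):

```js
// Hypothetical helper: hide Chirpity-specific controls when BirdNET is active.
function refreshModelSpecificControls(model) {
    const hide = model === 'birdnet';
    document.querySelectorAll('.chirpity-only')
        .forEach(el => el.classList.toggle('d-none', hide));
}

refreshModelSpecificControls(config.model); // call whenever the model setting changes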
diff --git a/js/model.js b/js/model.js
--- a/js/model.js
+++ b/js/model.js
@@ ... @@
     {
         tf.env().set("TOPK_LAST_DIM_CPU_HANDOFF_SIZE_THRESHOLD", 0);
     }
     tf.enableProdMode();
+    //tf.enableDebugMode();
     if (DEBUG) {
         console.log(tf.env());
         console.log(tf.env().getFlags());
@@ -292,10 +293,10 @@ class Model {
         const finalPrediction = newPrediction || prediction;
         const { indices, values } = tf.topk(finalPrediction, 5, true)
         // For reasons I don't understand, the Promise.all approach is flakey: on occasion, not all predictions are returned
-        // const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
-        const topIndices = await indices.array();
+        const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
+        //const topIndices = await indices.array();
         indices.dispose();
-        const topValues = await values.array();
+        //const topValues = await values.array();
         values.dispose();
         finalPrediction.dispose();
@@ -333,7 +334,7 @@ class Model {
         return tf.tidy(() => {
             const sigMax = tf.max(tensor, 1, true);
             const sigMin = tf.min(tensor, 1, true);
-            const normalized = tensor.sub(sigMin).div(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
+            const normalized = tensor.sub(sigMin).divNoNan(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
             return normalized;
         })
     }
@@ -346,7 +347,7 @@ class Model {
         const sigMin = tf.min(signal);
         const range = sigMax.sub(sigMin);
         //return signal.sub(sigMin).div(range).mul(tf.scalar(8192.0, 'float32')).sub(tf.scalar(4095, 'float32'))
-        return signal.sub(sigMin).div(range).mul(tf.scalar(2)).sub(tf.scalar(1))
+        return signal.sub(sigMin).divNoNan(range).mul(tf.scalar(2)).sub(tf.scalar(1))
         })
     };
     async predictChunk(audioBuffer, start, fileStart, file, threshold, confidence) {
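The @@ -292 hunk reinstates the Promise.all transfer for the top-k indices and values, despite the earlier in-code comment calling that approach flakey. One caveat worth flagging in review: a .catch() handler that only logs makes the awaited value undefined on failure, so the array destructuring itself will throw. A standalone sketch (promise names are illustrative):

```js
async function demo() {
    const failing = [Promise.resolve(1), Promise.reject(new Error('transfer failed'))];

    try {
        // Pattern from the hunk above: the catch handler returns undefined on error...
        const [a, b] = await Promise.all(failing)
            .catch(err => console.log('Data transfer error:', err));
    } catch (e) {
        console.log(e instanceof TypeError); // true: undefined is not iterable
    }

    // ...whereas returning a fallback value keeps the destructuring safe:
    const [a, b] = await Promise.all(failing)
        .catch(err => { console.log('Data transfer error:', err); return [[], []]; });
    console.log(a, b); // [] []
}
demo();
```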
diff --git a/js/ui.js b/js/ui.js
index 59c5b34a..51c03944 100644
--- a/js/ui.js
+++ b/js/ui.js
@@ -1355,7 +1355,16 @@ function updatePrefs() {
     }
 }
-
+function fillDefaults(config, defaultConfig) {
+    Object.keys(defaultConfig).forEach(key => {
+        if (!(key in config)) {
+            config[key] = defaultConfig[key];
+        } else if (typeof config[key] === 'object' && typeof defaultConfig[key] === 'object') {
+            // Recursively fill in defaults for nested objects
+            fillDefaults(config[key], defaultConfig[key]);
+        }
+    });
+}
 ///////////////////////// Window Handlers ////////////////////////////
 let appPath, tempPath;
 window.onload = async () => {
@@ -1369,7 +1378,7 @@ window.onload = async () => {
         UUID: uuidv4(),
         locale: 'en_uk',
         colormap: 'inferno',
-        timeOfDay: false,
+        timeOfDay: true,
         list: 'nocturnal',
         local: true,
         speciesThreshold: 0.03,
@@ -1404,11 +1413,8 @@ window.onload = async () => {
     }
     //fill in defaults - after updates add new items
-    Object.keys(defaultConfig).forEach(key => {
-        if (!(key in config)) {
-            config[key] = defaultConfig[key];
-        }
-    });
+    fillDefaults(config, defaultConfig);
+
     // Update model if old models in config
     if (!['chirpity', 'v3', 'v4', 'birdnet'].includes(config.model)) {
         config.model = config.model === 'v2.4' ? 'birdnet' : 'chirpity';
@@ -4053,11 +4059,11 @@ DOM.gain.addEventListener('input', () => {
 document.addEventListener('click', function (e) {
-    const target = e.target.closest('[id]').id;
+    const target = e.target.closest('[id]')?.id;
     contextMenu.classList.add("d-none");
     hideConfidenceSlider();
     config.debug && console.log('clicked', target);
-    target !== 'result1' && track('UI', 'Click', target);
+    target && target !== 'result1' && track('UI', 'Click', target);
 })
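The new fillDefaults() above is the nested-keys fix from the commit message: the old forEach only filled missing top-level keys, so a setting added inside an existing object never received its default after an update. A usage sketch (the key names are illustrative, not necessarily real Chirpity settings):

```js
const config = { colormap: 'viridis', filters: { highPass: 250 } };
const defaultConfig = {
    colormap: 'inferno',
    timeOfDay: true,
    filters: { highPass: 0, lowShelfFrequency: 0 }
};

fillDefaults(config, defaultConfig);
console.log(config);
// {
//   colormap: 'viridis',                              // existing value kept
//   filters: { highPass: 250, lowShelfFrequency: 0 }, // nested default filled in
//   timeOfDay: true                                   // missing top-level default added
// }
```

One edge worth noting: typeof null === 'object', so a null value in either object would send the recursion into the 'in' operator or Object.keys() with a null operand and throw; a truthiness guard on both operands would harden it.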
diff --git a/js/worker.js b/js/worker.js
index cd775a33..63a034d8 100644
--- a/js/worker.js
+++ b/js/worker.js
@@ -818,7 +818,7 @@ const prepSummaryStatement = (included) => {
     filesBeingProcessed = [...FILE_QUEUE];
-    for (let i = 0; i < NUM_WORKERS; i++) {
+    for (let i = 0; i < filesBeingProcessed.length; i++) {
         processNextFile({ start: start, end: end, worker: i });
     }
 }
@@ -1314,14 +1314,13 @@ const prepSummaryStatement = (included) => {
         return
     }
     const offlineCtx = await setupCtx(chunk, metadata[file].header);
+    let worker;
     if (offlineCtx) {
         offlineCtx.startRendering().then((resampled) => {
             const myArray = resampled.getChannelData(0);
-            if (++workerInstance >= NUM_WORKERS) {
-                workerInstance = 0;
-            }
-            let worker = workerInstance;
+            workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
+            worker = workerInstance;
             feedChunksToModel(myArray, chunkStart, file, end, worker);
             chunkStart += WINDOW_SIZE * BATCH_SIZE * sampleRate;
             // Now the async stuff is done ==>
@@ -1336,12 +1335,9 @@ const prepSummaryStatement = (included) => {
         });
     } else {
         console.log('Short chunk', chunk.length, 'skipping')
-        if (worker === undefined) {
-            if (++workerInstance >= NUM_WORKERS) {
-                workerInstance = 0;
-            }
-            worker = workerInstance;
-        }
+        workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
+        worker = workerInstance;
+        // Create array with 0's (short segment of silence that will trigger the finalChunk flag
         const myArray = new Float32Array(Array.from({length: chunkLength}).fill(0));
         feedChunksToModel(myArray, chunkStart, file, end);
     }
 })
@@ -1350,6 +1346,7 @@ const prepSummaryStatement = (included) => {
     readStream.on('end', function () {
         readStream.close();
+        console.log('All chunks sent for ', file)
     })
     readStream.on('error', err => {
         console.log(`readstream error: ${err}, start: ${start}, , end: ${end}, duration: ${metadata[file].duration}`);
@@ -1438,7 +1435,6 @@ const prepSummaryStatement = (included) => {
     };
     predictWorkers[worker].isAvailable = false;
     predictWorkers[worker].postMessage(objData, [channelData.buffer]);
-
 }

 async function doPrediction({
@@ -1794,7 +1790,9 @@ const prepSummaryStatement = (included) => {
             worker: i
         })
         worker.onmessage = async (e) => {
-            await parseMessage(e)
+            await parseMessage(e).catch(error => {
+                console.warn("Parse message error", error, 'e was', e)
+            })
         }
         worker.onerror = (e) => {
             console.warn(`Worker ${i} is suffering, shutting it down. The error was:`, e)
@@ -1966,7 +1964,7 @@ const prepSummaryStatement = (included) => {
     }
     // Remove the trailing comma and space
     insertQuery = insertQuery.slice(0, -2);
-    DEBUG && console.log(insertQuery);
+    //DEBUG && console.log(insertQuery);
     // Make sure we have some values to INSERT
     insertQuery.endsWith(')') && await db.runAsync(insertQuery);
     return fileID
 }

 const parsePredictions = async (response) => {
     let file = response.file;
-    const included = await getIncludedIDs(file);
+    const included = await getIncludedIDs(file).catch(error => console.log('Error getting included IDs', error));
     const latestResult = response.result, db = STATE.db;
     DEBUG && console.log('worker being used:', response.worker);
-    if (! STATE.selection) await generateInsertQuery(latestResult, file);
+    if (! STATE.selection) await generateInsertQuery(latestResult, file).catch(error => console.log('Error generating insert query', error));
     let [keysArray, speciesIDBatch, confidenceBatch] = latestResult;
     for (let i = 0; i < keysArray.length; i++) {
         let updateUI = false;
@@ -2003,7 +2001,7 @@ const prepSummaryStatement = (included) => {
         confidenceRequired = STATE.detect.confidence;
     }
     if (confidence >= confidenceRequired) {
-        const { cname } = await memoryDB.getAsync(`SELECT cname FROM species WHERE id = ${speciesID}`);
+        const { cname } = await memoryDB.getAsync(`SELECT cname FROM species WHERE id = ${speciesID}`).catch(error => console.log('Error getting species name', error));
         const result = {
             timestamp: timestamp,
             position: key,
@@ -2041,6 +2039,7 @@ const prepSummaryStatement = (included) => {
             });
         }
     })
+    .catch(error => console.log('Error generating new result', error))
     }
     updateFilesBeingProcessed(response.file)
     console.log(`File ${file} processed after ${(new Date() - predictionStart) / 1000} seconds: ${filesBeingProcessed.length} files to go`);
@@ -2075,9 +2074,10 @@ const prepSummaryStatement = (included) => {
             case "prediction": {
                 if ( !aborted) {
                     predictWorkers[response.worker].isAvailable = true;
-                    let worker = await parsePredictions(response);
-                    DEBUG && console.log('predictions left for', response.file, predictionsReceived[response.file] - predictionsRequested[response.file])
-                    if (predictionsReceived[response.file] === predictionsRequested[response.file]) {
+                    let worker = await parsePredictions(response).catch(error => console.log('Error parsing predictions', error));
+                    console.log('predictions left for', response.file, predictionsReceived[response.file] - predictionsRequested[response.file])
+                    const remaining = predictionsReceived[response.file] - predictionsRequested[response.file]
+                    if (remaining === 0) {
                         const limit = 10;
                         clearCache(CACHE_LOCATION, limit);
                         if (filesBeingProcessed.length) {
@@ -2271,7 +2271,7 @@ const prepSummaryStatement = (included) => {
     const params = getSummaryParams(included);
     const summary = await STATE.GET_SUMMARY_SQL.allAsync(...params);
-    DEBUG && console.log("Get Summary took", (Date.now() - t0) / 1000, "seconds");
+    //DEBUG && console.log("Get Summary took", (Date.now() - t0) / 1000, "seconds");
     const event = interim ? 'update-summary' : 'summary-complate';
     UI.postMessage({
         event: event,
@@ -3038,7 +3038,7 @@ const prepSummaryStatement = (included) => {
         await setIncludedIDs(lat,lon,week);
         hitOrMiss = 'miss';
     }
-    DEBUG && console.log(`Cache ${hitOrMiss}: setting the ${STATE.list} list took ${Date.now() -t0}ms`)
+    //DEBUG && console.log(`Cache ${hitOrMiss}: setting the ${STATE.list} list took ${Date.now() -t0}ms`)
     return STATE.included[STATE.model][STATE.list];
     }
 }
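Two patterns in the worker.js changes are worth a note. The file loop now issues one processNextFile() call per queued file rather than one per worker, and the round-robin worker selection collapses into a single ternary. That ternary is equivalent to a modulo increment; a standalone sketch:

```js
let workerInstance = 0;
const NUM_WORKERS = 3;

// Same effect as the patch's ternary:
// workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
function nextWorker() {
    workerInstance = (workerInstance + 1) % NUM_WORKERS;
    return workerInstance;
}

console.log(nextWorker(), nextWorker(), nextWorker(), nextWorker()); // 1 2 0 1
```

Also note that `remaining === 0` is the old `predictionsReceived === predictionsRequested` test rewritten as a difference, and that the 'predictions left' log dropped its DEBUG guard in this hunk, so it now prints unconditionally.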
diff --git a/package.json b/package.json
index 1a991194..2daeaa87 100644
--- a/package.json
+++ b/package.json
@@ -203,7 +203,7 @@
     "ffmpeg-static-electron": "^2.0.3",
     "fluent-ffmpeg": "^2.1.2",
     "lodash.merge": "^4.6.2",
-    "sqlite3": "^5.1.6",
+    "sqlite3": "^5.1.7",
     "suncalc": "^1.9.0",
     "utimes": "^5.1.1",
     "uuid": "^8.3.2",
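Since "^5.1.6" already permits 5.1.7 under semver caret rules, this bump only raises the guaranteed minimum for fresh installs. A minimal smoke test that the upgraded native binding loads and answers queries (standalone; run from the project root after npm install):

```js
const sqlite3 = require('sqlite3');

const db = new sqlite3.Database(':memory:');
db.get('SELECT sqlite_version() AS v', (err, row) => {
    if (err) throw err;
    console.log('SQLite engine:', row.v); // binding compiled/loaded correctly
    db.close();
});
```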