Skip to content

Commit

Permalink
Merge branch 'thread-safe'
Browse files Browse the repository at this point in the history
  • Loading branch information
Mattk70 committed Feb 14, 2024
2 parents 74359e0 + 590be4b commit ae567cf
Show file tree
Hide file tree
Showing 7 changed files with 89 additions and 49 deletions.
2 changes: 1 addition & 1 deletion Help/settings.html
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@
<td colspan="2" class="text-center text-bg-light"><h5>Audio Preferences</h5></td>
</tr>
<tr>
<td><b>Gain Adjustment</b></td>
<td><b>Volume Adjustment</b></td>
<td>If your recordings are very quiet, you can increase the loudness of the audio by adding gain. The volume of audio can be increased by up to 50 decibels.</td>
</tr>
<tr>
Expand Down
4 changes: 2 additions & 2 deletions index.html
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ <h5>Quick access settings panel</h5>
<ol class="text-start ps-5">
<li>Nocmig mode</li>
<li>Audio filters</li>
<li>Context-aware mode</li>
<li>Context-aware mode (Chirpity model only)</li>
<li>Fullscreen mode for the spectrogram</li>
<li>Which detection list to use</li>
<li>And the confidence threshold</li>
Expand Down Expand Up @@ -1060,7 +1060,7 @@ <h5 class="modal-title" id="locationModalLabel">Set Location</h5>
<span style="max-width: 70px" id="audioFiltersIcon" title="Experimental audio filters applied"
class="material-symbols-outlined btn btn-outline-secondary p-1 pt-2">blur_on</span>
<span style="max-width: 70px" id="context-mode" title="Context Aware mode enabled"
class="material-symbols-outlined btn btn-outline-secondary p-1 pt-2 text-warning">swap_horiz</span>
class="chirpity-only material-symbols-outlined btn btn-outline-secondary p-1 pt-2 text-warning">swap_horiz</span>
<span title="Toggle Fullscreen mode (Ctrl+F)" id="fullscreen"
class="material-symbols-outlined btn btn-outline-secondary p-1 pt-2">fullscreen</span>
<span style="max-width: 70px" id="list-icon" class="btn btn-outline-secondary p-1"><img
Expand Down
3 changes: 1 addition & 2 deletions js/BirdNet2.4.js
Original file line number Diff line number Diff line change
Expand Up @@ -320,9 +320,8 @@ class Model {
const finalPrediction = newPrediction || prediction;

const { indices, values } = tf.topk(finalPrediction, 5, true);
const topIndices = indices.arraySync();
const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
indices.dispose();
const topValues = values.arraySync();
values.dispose();
// end new
// const array_of_predictions = finalPrediction.arraySync()
Expand Down
11 changes: 5 additions & 6 deletions js/model.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ onmessage = async (e) => {
tf.env().set("TOPK_LAST_DIM_CPU_HANDOFF_SIZE_THRESHOLD", 0);
}
tf.enableProdMode();
//tf.enableDebugMode();
if (DEBUG) {
console.log(tf.env());
console.log(tf.env().getFlags());
Expand Down Expand Up @@ -291,11 +292,9 @@ class Model {

const finalPrediction = newPrediction || prediction;
const { indices, values } = tf.topk(finalPrediction, 5, true)
// For reasons I don't understand, the Promise.all approach is flakey: on occasion, not all predictions are returned
// const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
const topIndices = await indices.array();

const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
indices.dispose();
const topValues = await values.array();
values.dispose();

finalPrediction.dispose();
Expand Down Expand Up @@ -333,7 +332,7 @@ class Model {
return tf.tidy(() => {
const sigMax = tf.max(tensor, 1, true);
const sigMin = tf.min(tensor, 1, true);
const normalized = tensor.sub(sigMin).div(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
const normalized = tensor.sub(sigMin).divNoNan(sigMax.sub(sigMin)).mul(tf.scalar(2)).sub(tf.scalar(1));
return normalized;
})
}
Expand All @@ -346,7 +345,7 @@ class Model {
const sigMin = tf.min(signal);
const range = sigMax.sub(sigMin);
//return signal.sub(sigMin).div(range).mul(tf.scalar(8192.0, 'float32')).sub(tf.scalar(4095, 'float32'))
return signal.sub(sigMin).div(range).mul(tf.scalar(2)).sub(tf.scalar(1))
return signal.sub(sigMin).divNoNan(range).mul(tf.scalar(2)).sub(tf.scalar(1))
})
};
async predictChunk(audioBuffer, start, fileStart, file, threshold, confidence) {
Expand Down
24 changes: 15 additions & 9 deletions js/ui.js
Original file line number Diff line number Diff line change
Expand Up @@ -1355,7 +1355,16 @@ function updatePrefs() {
}
}


/**
 * Recursively copies missing keys from defaultConfig into config, in place.
 * Existing values in config always win; only absent keys are filled.
 *
 * Recurses only when BOTH sides hold plain (non-null, non-array) objects:
 * - `typeof null === 'object'`, so without the null guard a saved `null`
 *   value paired with an object default would crash on `key in null`.
 * - Arrays are treated atomically: a user-set array must win wholesale
 *   rather than being padded with elements from the default array.
 *
 * @param {Object} config - the (possibly partial) stored configuration; mutated.
 * @param {Object} defaultConfig - the complete set of default values; read-only.
 */
function fillDefaults(config, defaultConfig) {
    // True only for plain objects we can safely merge into.
    const isPlainObject = (value) =>
        value !== null && typeof value === 'object' && !Array.isArray(value);
    Object.keys(defaultConfig).forEach(key => {
        if (!(key in config)) {
            config[key] = defaultConfig[key];
        } else if (isPlainObject(config[key]) && isPlainObject(defaultConfig[key])) {
            // Recursively fill in defaults for nested objects
            fillDefaults(config[key], defaultConfig[key]);
        }
    });
}
///////////////////////// Window Handlers ////////////////////////////
let appPath, tempPath;
window.onload = async () => {
Expand All @@ -1369,7 +1378,7 @@ window.onload = async () => {
UUID: uuidv4(),
locale: 'en_uk',
colormap: 'inferno',
timeOfDay: false,
timeOfDay: true,
list: 'nocturnal',
local: true,
speciesThreshold: 0.03,
Expand Down Expand Up @@ -1404,11 +1413,8 @@ window.onload = async () => {
}

//fill in defaults - after updates add new items
Object.keys(defaultConfig).forEach(key => {
if (!(key in config)) {
config[key] = defaultConfig[key];
}
});
fillDefaults(config, defaultConfig);

// Update model if old models in config
if (!['chirpity', 'v3', 'v4', 'birdnet'].includes(config.model)) {
config.model = config.model === 'v2.4' ? 'birdnet' : 'chirpity';
Expand Down Expand Up @@ -4053,11 +4059,11 @@ DOM.gain.addEventListener('input', () => {


document.addEventListener('click', function (e) {
const target = e.target.closest('[id]').id;
const target = e.target.closest('[id]')?.id;
contextMenu.classList.add("d-none");
hideConfidenceSlider();
config.debug && console.log('clicked', target);
target !== 'result1' && track('UI', 'Click', target);
target && target !== 'result1' && track('UI', 'Click', target);
})


Expand Down
92 changes: 64 additions & 28 deletions js/worker.js
Original file line number Diff line number Diff line change
Expand Up @@ -818,7 +818,7 @@ const prepSummaryStatement = (included) => {

filesBeingProcessed = [...FILE_QUEUE];

for (let i = 0; i < NUM_WORKERS; i++) {
for (let i = 0; i < filesBeingProcessed.length; i++) {
processNextFile({ start: start, end: end, worker: i });
}
}
Expand Down Expand Up @@ -1314,14 +1314,13 @@ const prepSummaryStatement = (included) => {
return
}
const offlineCtx = await setupCtx(chunk, metadata[file].header);
let worker;
if (offlineCtx) {
offlineCtx.startRendering().then((resampled) => {
const myArray = resampled.getChannelData(0);

if (++workerInstance >= NUM_WORKERS) {
workerInstance = 0;
}
let worker = workerInstance;
workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
worker = workerInstance;
feedChunksToModel(myArray, chunkStart, file, end, worker);
chunkStart += WINDOW_SIZE * BATCH_SIZE * sampleRate;
// Now the async stuff is done ==>
Expand All @@ -1336,12 +1335,9 @@ const prepSummaryStatement = (included) => {
});
} else {
console.log('Short chunk', chunk.length, 'skipping')
if (worker === undefined) {
if (++workerInstance >= NUM_WORKERS) {
workerInstance = 0;
}
worker = workerInstance;
}
workerInstance = ++workerInstance >= NUM_WORKERS ? 0 : workerInstance;
worker = workerInstance;

// Create array with 0's (short segment of silence that will trigger the finalChunk flag)
const myArray = new Float32Array(Array.from({length: chunkLength}).fill(0));
feedChunksToModel(myArray, chunkStart, file, end);
Expand All @@ -1350,6 +1346,7 @@ const prepSummaryStatement = (included) => {
})
readStream.on('end', function () {
readStream.close();
console.log('All chunks sent for ', file)
})
readStream.on('error', err => {
console.log(`readstream error: ${err}, start: ${start}, , end: ${end}, duration: ${metadata[file].duration}`);
Expand Down Expand Up @@ -1438,7 +1435,6 @@ const prepSummaryStatement = (included) => {
};
predictWorkers[worker].isAvailable = false;
predictWorkers[worker].postMessage(objData, [channelData.buffer]);

}

async function doPrediction({
Expand Down Expand Up @@ -1769,7 +1765,35 @@ const prepSummaryStatement = (included) => {
}
}


// Create a flag to indicate if parseMessage is currently being executed
let isParsing = false;

// Create a queue to hold messages while parseMessage is executing
const messageQueue = [];

// Function to process the message queue
/**
 * Drains messageQueue one message at a time, serialised by the isParsing
 * flag so that parseMessage never runs concurrently with itself.
 * Re-entrant calls while a parse is in flight return immediately; the
 * in-flight run re-invokes processQueue when it finishes.
 */
const processQueue = async () => {
    // Bail out if a parse is already running or there is nothing queued.
    if (isParsing || messageQueue.length === 0) return;

    // Claim the lock before any await so concurrent callers see it set.
    isParsing = true;
    const message = messageQueue.shift();

    try {
        await parseMessage(message);
    } catch (error) {
        console.warn("Parse message error", error, 'message was', message);
    }

    // Release the lock, then pick up whatever arrived in the meantime.
    isParsing = false;
    processQueue();
};


/// Workers From the MDN example5
function spawnPredictWorkers(model, list, batchSize, threads) {
NUM_WORKERS = threads;
Expand All @@ -1793,9 +1817,16 @@ const prepSummaryStatement = (included) => {
threshold: STATE.speciesThreshold,
worker: i
})
worker.onmessage = async (e) => {
await parseMessage(e)
}

// Web worker message event handler
worker.onmessage = (e) => {
// Push the message to the queue
messageQueue.push(e);
// if the message queue is getting too long, ease back on the calls to update summary?

// Process the queue
processQueue();
};
worker.onerror = (e) => {
console.warn(`Worker ${i} is suffering, shutting it down. THe error was:`, e)
predictWorkers.splice(i, 1);
Expand Down Expand Up @@ -1932,7 +1963,8 @@ const prepSummaryStatement = (included) => {
}

const generateInsertQuery = async (latestResult, file) => {
const db = STATE.db;
const db = STATE.db;
await db.runAsync('BEGIN');
let insertQuery = 'INSERT OR IGNORE INTO records VALUES ';
let fileID, changes;
let res = await db.getAsync('SELECT id FROM files WHERE name = ?', file);
Expand Down Expand Up @@ -1966,18 +1998,20 @@ const prepSummaryStatement = (included) => {
}
// Remove the trailing comma and space
insertQuery = insertQuery.slice(0, -2);
DEBUG && console.log(insertQuery);
//DEBUG && console.log(insertQuery);
// Make sure we have some values to INSERT
insertQuery.endsWith(')') && await db.runAsync(insertQuery);
insertQuery.endsWith(')') && await db.runAsync(insertQuery)
.catch(error => console.log("Database error:", error))
await db.runAsync('END');
return fileID
}

const parsePredictions = async (response) => {
let file = response.file;
const included = await getIncludedIDs(file);
const included = await getIncludedIDs(file).catch(error => console.log('Error getting included IDs', error));
const latestResult = response.result, db = STATE.db;
DEBUG && console.log('worker being used:', response.worker);
if (! STATE.selection) await generateInsertQuery(latestResult, file);
if (! STATE.selection) await generateInsertQuery(latestResult, file).catch(error => console.log('Error generating insert query', error));
let [keysArray, speciesIDBatch, confidenceBatch] = latestResult;
for (let i = 0; i < keysArray.length; i++) {
let updateUI = false;
Expand All @@ -2003,7 +2037,7 @@ const prepSummaryStatement = (included) => {
confidenceRequired = STATE.detect.confidence;
}
if (confidence >= confidenceRequired) {
const { cname } = await memoryDB.getAsync(`SELECT cname FROM species WHERE id = ${speciesID}`);
const { cname } = await memoryDB.getAsync(`SELECT cname FROM species WHERE id = ${speciesID}`).catch(error => console.log('Error getting species name', error));
const result = {
timestamp: timestamp,
position: key,
Expand All @@ -2013,7 +2047,7 @@ const prepSummaryStatement = (included) => {
score: confidence
}
sendResult(++index, result, false);
// Only show the highest confidence detection, unless it's a slection analysis
// Only show the highest confidence detection, unless it's a selection analysis
if (! STATE.selection) break;
};
}
Expand Down Expand Up @@ -2041,6 +2075,7 @@ const prepSummaryStatement = (included) => {
});
}
})
.catch(error => console.log('Error generating new result', error))
}
updateFilesBeingProcessed(response.file)
console.log(`File ${file} processed after ${(new Date() - predictionStart) / 1000} seconds: ${filesBeingProcessed.length} files to go`);
Expand Down Expand Up @@ -2075,9 +2110,10 @@ const prepSummaryStatement = (included) => {
case "prediction": {
if ( !aborted) {
predictWorkers[response.worker].isAvailable = true;
let worker = await parsePredictions(response);
DEBUG && console.log('predictions left for', response.file, predictionsReceived[response.file] - predictionsRequested[response.file])
if (predictionsReceived[response.file] === predictionsRequested[response.file]) {
let worker = await parsePredictions(response).catch(error => console.log('Error parsing predictions', error));
console.log('predictions left for', response.file, predictionsReceived[response.file] - predictionsRequested[response.file])
const remaining = predictionsReceived[response.file] - predictionsRequested[response.file]
if (remaining === 0) {
const limit = 10;
clearCache(CACHE_LOCATION, limit);
if (filesBeingProcessed.length) {
Expand Down Expand Up @@ -2271,7 +2307,7 @@ const prepSummaryStatement = (included) => {
const params = getSummaryParams(included);
const summary = await STATE.GET_SUMMARY_SQL.allAsync(...params);

DEBUG && console.log("Get Summary took", (Date.now() - t0) / 1000, "seconds");
//DEBUG && console.log("Get Summary took", (Date.now() - t0) / 1000, "seconds");
const event = interim ? 'update-summary' : 'summary-complate';
UI.postMessage({
event: event,
Expand Down Expand Up @@ -3038,7 +3074,7 @@ const prepSummaryStatement = (included) => {
await setIncludedIDs(lat,lon,week);
hitOrMiss = 'miss';
}
DEBUG && console.log(`Cache ${hitOrMiss}: setting the ${STATE.list} list took ${Date.now() -t0}ms`)
//DEBUG && console.log(`Cache ${hitOrMiss}: setting the ${STATE.list} list took ${Date.now() -t0}ms`)
return STATE.included[STATE.model][STATE.list];
}
}
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@
"ffmpeg-static-electron": "^2.0.3",
"fluent-ffmpeg": "^2.1.2",
"lodash.merge": "^4.6.2",
"sqlite3": "^5.1.6",
"sqlite3": "^5.1.7",
"suncalc": "^1.9.0",
"utimes": "^5.1.1",
"uuid": "^8.3.2",
Expand Down

0 comments on commit ae567cf

Please sign in to comment.