Skip to content

Commit

Permalink
Replaced flaky Promise.all code in predictBatch. Added basic error h…
Browse files Browse the repository at this point in the history
…andling for predict workers.
  • Loading branch information
Mattk70 committed Feb 8, 2024
1 parent 8f275b1 commit 97a9f0b
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 25 deletions.
9 changes: 4 additions & 5 deletions js/BirdNet2.4.js
Original file line number Diff line number Diff line change
Expand Up @@ -325,12 +325,11 @@ class Model {
if (maskedTensorBatch) maskedTensorBatch.dispose();

const finalPrediction = newPrediction || prediction;
//new
const { indices, values } = tf.topk(finalPrediction, 5, true)
const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch((err => console.log(err)));
// const topIndices = indices.arraySync();
// const topValues = values.arraySync();

const { indices, values } = tf.topk(finalPrediction, 5, true);
const topIndices = await indices.array();
indices.dispose();
const topValues = await values.array();
values.dispose();
// end new
// const array_of_predictions = finalPrediction.arraySync()
Expand Down
39 changes: 19 additions & 20 deletions js/model.js
Original file line number Diff line number Diff line change
Expand Up @@ -68,22 +68,21 @@ onmessage = async (e) => {
break;
}
case "predict": {
if (myModel.model_loaded) {
const { chunks: chunks, start: start, fileStart: fileStart, file: file, snr: snr, confidence: confidence, context: context, resetResults: resetResults } = e.data;
myModel.useContext = context;
myModel.selection = !resetResults;
const [result,filename,startPosition] = await myModel.predictChunk(chunks, start, fileStart, file, snr, confidence / 1000);
response = {
message: "prediction",
file: filename,
result: result,
fileStart: startPosition,
worker: worker,
selection: myModel.selection
};
postMessage(response);
myModel.result = [];
}
if (! myModel.model_loaded) { console.log("worker", worker, "received a prediction request before it was ready") }
const { chunks: chunks, start: start, fileStart: fileStart, file: file, snr: snr, confidence: confidence, context: context, resetResults: resetResults } = e.data;
myModel.useContext = context;
myModel.selection = !resetResults;
const [result,filename,startPosition] = await myModel.predictChunk(chunks, start, fileStart, file, snr, confidence / 1000);
response = {
message: "prediction",
file: filename,
result: result,
fileStart: startPosition,
worker: worker,
selection: myModel.selection
};
postMessage(response);
myModel.result = [];
break;
}
case "get-spectrogram": {
Expand Down Expand Up @@ -292,11 +291,11 @@ class Model {

const finalPrediction = newPrediction || prediction;
const { indices, values } = tf.topk(finalPrediction, 5, true)
//const adjusted_values = tf.div(1, tf.add(1, tf.exp(tf.mul(tf.neg(10), values.sub(0.6)))));
const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch((err => console.log(err)));
// const topIndices = indices.arraySync();
// const topValues = await values.arraySync();
// For reasons I don't understand, the Promise.all approach is flakey: on occasion, not all predictions are returned
// const [topIndices, topValues] = await Promise.all([indices.array(), values.array()]).catch(err => console.log('Data transfer error:',err));
const topIndices = await indices.array();
indices.dispose();
const topValues = await values.array();
values.dispose();

finalPrediction.dispose();
Expand Down
5 changes: 5 additions & 0 deletions js/worker.js
Original file line number Diff line number Diff line change
Expand Up @@ -1797,6 +1797,11 @@ const prepSummaryStatement = (included) => {
worker.onmessage = async (messageEvent) => {
  // Forward every message from this predict worker to the shared dispatcher.
  await parseMessage(messageEvent);
}
worker.onerror = (e) => {
  // A worker that raises an error is treated as unrecoverable: remove it from
  // the pool so no further prediction requests are routed to it, then kill it.
  console.warn(`Worker ${i} is suffering, shutting it down. The error was:`, e)
  // Look the worker up by identity rather than trusting the loop index `i`
  // captured at creation time: once an earlier worker has been spliced out,
  // `i` goes stale and would remove the wrong (healthy) worker.
  const index = predictWorkers.indexOf(worker);
  if (index !== -1) predictWorkers.splice(index, 1);
  worker.terminate();
}
}
}

Expand Down

0 comments on commit 97a9f0b

Please sign in to comment.