Handle threads & batch size changes without respawning all threads
Prevent threads/batch size changes during analysis
Mattk70 committed Oct 13, 2024
1 parent e3a94b0 commit 92bf0a6
Showing 4 changed files with 56 additions and 21 deletions.
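In outline, the new control flow is: the renderer posts a single message to the coordinating worker, which either resizes its predict-worker pool in place ('change-threads') or fans a re-warm request out to every predict worker ('change-batch-size'). A minimal sketch of the message shapes, using only the action names and payload fields that appear in this diff (n, size, worker, and predictWorker are placeholders for the surrounding state):

    // Renderer (js/ui.js) -> coordinating worker (js/worker.js)
    worker.postMessage({ action: 'change-threads', threads: n });
    worker.postMessage({ action: 'change-batch-size', batchSize: size });

    // Coordinating worker -> each predict worker (js/model.js, js/BirdNet2.4.js)
    predictWorker.postMessage({ message: 'change-batch-size', batchSize: BATCH_SIZE });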
5 changes: 4 additions & 1 deletion js/BirdNet2.4.js
@@ -24,7 +24,10 @@ onmessage = async (e) => {
     let response;
     try {
         switch (modelRequest) {
-
+            case 'change-batch-size': {
+                myModel.warmUp(e.data.batchSize);
+                break;
+            }
             case "load": {
                 const version = e.data.model;
                 DEBUG && console.log("load request to worker");
4 changes: 4 additions & 0 deletions js/model.js
@@ -77,6 +77,10 @@ onmessage = async (e) => {
     let response;
     try {
         switch (modelRequest) {
+            case 'change-batch-size': {
+                myModel.warmUp(e.data.batchSize)
+                break;
+            }
             case "load": {
                 loadModel(e.data)
                 break;
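Both model workers hand the new size to myModel.warmUp, whose body is not part of this commit. A hypothetical sketch of what a batch-size re-warm could look like in tfjs, purely as an assumption about the model classes (function and field names here are illustrative, not the project's):

    // Illustrative only — not the project's code. Assumes tfjs (`tf`) is already
    // loaded in the worker, `model` is a loaded tf.LayersModel, and chunkLength
    // is the number of audio samples the model expects per chunk.
    function warmUp(model, batchSize, chunkLength) {
        tf.tidy(() => {
            // One dummy forward pass at the new batch size, so the first real
            // prediction doesn't pay the shape re-tracing/allocation cost.
            const dummy = tf.zeros([batchSize, chunkLength]);
            model.predict(dummy); // result is discarded; tidy() disposes the tensors
        });
    }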
35 changes: 24 additions & 11 deletions js/ui.js
@@ -4693,20 +4693,33 @@ DOM.gain.addEventListener('input', () => {
             break;
         }
         case 'thread-slider': {
-            // number of threads
-            DOM.numberOfThreads.textContent = DOM.threadSlider.value;
-            config[config[config.model].backend].threads = DOM.threadSlider.valueAsNumber;
-            loadModel();
+            if (PREDICTING){
+                generateToast({message: 'It is not possible to change the number of threads while an analysis is underway', type:'warning'})
+                DOM.threadSlider.value = config[config[config.model].backend].threads
+            } else {
+                // change number of threads
+                DOM.numberOfThreads.textContent = DOM.threadSlider.value;
+                config[config[config.model].backend].threads = DOM.threadSlider.valueAsNumber;
+                worker.postMessage({action: 'change-threads', threads: DOM.threadSlider.valueAsNumber})
+            }
             break;
         }
         case 'batch-size': {
-            DOM.batchSizeValue.textContent = BATCH_SIZE_LIST[DOM.batchSizeSlider.value].toString();
-            config[config[config.model].backend].batchSize = BATCH_SIZE_LIST[element.value];
-            // Need this in case a non-default batchsize was set, and then changed to 32
-            if (config[config.model].backend === 'tensorflow') config.tensorflow.batchSizeWasReset = true;
-            loadModel();
-            // Reset region maxLength
-            initRegion();
+            if (PREDICTING){
+                generateToast({message: 'It is not possible to change the batch size while an analysis is underway', type:'warning'})
+                const batch = config[config[config.model].backend].batchSize;
+                DOM.batchSizeSlider.value = BATCH_SIZE_LIST.indexOf(batch);
+                DOM.batchSizeValue.textContent = batch;
+            } else {
+                DOM.batchSizeValue.textContent = BATCH_SIZE_LIST[DOM.batchSizeSlider.value].toString();
+                config[config[config.model].backend].batchSize = BATCH_SIZE_LIST[element.value];
+                // Need this in case a non-default batchsize was set, and then changed to 32
+                if (config[config.model].backend === 'tensorflow') config.tensorflow.batchSizeWasReset = true;
+
+                worker.postMessage({action: 'change-batch-size', batchSize: BATCH_SIZE_LIST[element.value]})
+                // Reset region maxLength
+                initRegion();
+            }
             break;
         }
         case 'colourmap': {
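The revert path above works because the batch-size slider stores an index into BATCH_SIZE_LIST, not the size itself. A small illustration of that round-trip (the list values and element lookup are made up for the example; slider and config stand in for DOM.batchSizeSlider and the backend config):

    const BATCH_SIZE_LIST = [4, 8, 16, 32, 48, 64];        // example values only
    const slider = document.getElementById('batch-size');  // hypothetical element
    const config = { batchSize: 32 };                      // hypothetical stored config
    // Apply: slider position -> batch size
    const requested = BATCH_SIZE_LIST[slider.value];           // e.g. "3" -> 32
    // Revert: stored batch size -> slider position
    slider.value = BATCH_SIZE_LIST.indexOf(config.batchSize);  // e.g. 32 -> 3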
33 changes: 24 additions & 9 deletions js/worker.js
@@ -398,6 +398,27 @@ async function handleMessage(e) {
             await onAnalyse(args);
             break;
         }
+        case 'change-batch-size': {
+            BATCH_SIZE = args.batchSize;
+            predictWorkers.forEach(worker => {
+                worker.postMessage({message:'change-batch-size', batchSize: BATCH_SIZE})
+            })
+            break;
+        }
+        case 'change-threads': {
+            const threads = e.data.threads;
+            const delta = threads - predictWorkers.length;
+            NUM_WORKERS+=delta;
+            if (delta > 0) {
+                spawnPredictWorkers(STATE.model, STATE.list, BATCH_SIZE, delta)
+            } else {
+                for (let i = delta; i < 0; i++) {
+                    const worker = predictWorkers.pop();
+                    worker.terminate();
+                }
+            }
+            break;
+        }
         case "change-mode": {
             await onChangeMode(args.mode);
             break;
@@ -594,7 +615,8 @@ async function onLaunch({model = 'chirpity', batchSize = 32, threads = 1, backen
     await checkAndApplyUpdates(diskDB);
     await createDB(); // now make the memoryDB
     STATE.update({ db: memoryDB })
-    spawnPredictWorkers(model, list, batchSize, threads);
+    NUM_WORKERS = threads;
+    spawnPredictWorkers(model, list, batchSize, NUM_WORKERS);
 }

@@ -2025,13 +2047,7 @@ const processQueue = async () => {
     await parseMessage(message).catch( (error) => {
         console.warn("Parse message error", error, 'message was', message);
     });
-    // Dial down the getSummary calls if the queue length starts growing
-    // if (messageQueue.length > NUM_WORKERS * 2 ) {
-    //     STATE.incrementor = Math.min(STATE.incrementor *= 2, 256);
-    //     console.log('increased incrementor to ', STATE.incrementor)
-    // }
-
-
+
     // Set isParsing to false to allow the next message to be processed
     isParsing = false;

@@ -2043,7 +2059,6 @@

 /// Workers From the MDN example5
 function spawnPredictWorkers(model, list, batchSize, threads) {
-    NUM_WORKERS = threads;
     // And be ready to receive the list:
     for (let i = 0; i < threads; i++) {
         const workerSrc = model === 'v3' ? 'BirdNet' : model === 'birdnet' ? 'BirdNet2.4' : 'model';
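The heart of the commit is the 'change-threads' branch above: grow or shrink the pool by the delta rather than terminating and respawning every worker. The same pattern reduced to a standalone sketch (spawn stands in for spawnPredictWorkers; existing workers keep their warmed-up state):

    // Generic pool resize: only the difference between the current and target
    // size is spawned or terminated.
    function resizePool(pool, targetSize, spawn) {
        const delta = targetSize - pool.length;
        if (delta > 0) {
            for (let i = 0; i < delta; i++) pool.push(spawn());      // grow by delta
        } else {
            for (let i = delta; i < 0; i++) pool.pop().terminate();  // shrink by |delta|
        }
        return pool.length;
    }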
