
Commit

purge all examples of sending a result as a string advising of no detections

disable all settings options that shouldn't be changed while an analysis is underway
Mattk70 committed Nov 9, 2024
1 parent 5664376 commit 2da8d9b
Showing 4 changed files with 45 additions and 61 deletions.
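
For orientation, the core of the change in js/ui.js is a single helper that locks or unlocks every analysis-sensitive control, called once when an analysis starts and once when it finishes. A minimal sketch of that pattern, using only identifiers that appear in the diff below (the control list and function bodies are trimmed for illustration, not the full implementation):

function disableSettingsDuringAnalysis(bool) {
    // Lock (or unlock) every setting that must not change mid-analysis.
    [DOM.modelToUse, DOM.threadSlider, DOM.batchSizeSlider, DOM.locale, DOM.listToUse]
        .forEach(control => control.disabled = bool);
}

function postAnalyseMessage(args) {
    PREDICTING = true;
    disableSettingsDuringAnalysis(true);   // lock settings for the whole run
    // ...post the analysis request to the worker...
}

function onAnalysisComplete({quiet}) {
    PREDICTING = false;
    disableSettingsDuringAnalysis(false);  // unlock settings once the run ends
    // ...refresh the results view...
}

This replaces the scattered per-control checks that previously showed a warning toast and reverted the value whenever a setting was touched while PREDICTING was true.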
2 changes: 1 addition & 1 deletion Help/keyboard.html
@@ -94,7 +94,7 @@
<td>Skip back to the previous detection</td>
</tr>
<tr>
<td colspan="2" class="text-center text-bg-light"><h5>Spectrogram</h5></td>
<td colspan="2" class="text-center text-bg-light"><h5>Spectrogram Adjustments</h5></td>
</tr>
<tr>
<td><kbd>+</kbd></td>
2 changes: 2 additions & 0 deletions js/DOMcache.js

Some generated files are not rendered by default.

92 changes: 40 additions & 52 deletions js/ui.js
@@ -1042,7 +1042,7 @@ function postAnalyseMessage(args) {
const selection = !!args.end;
const filesInScope = args.filesInScope;
PREDICTING = true;
disableSettingsListElements(true)
disableSettingsDuringAnalysis(true)
if (!selection) {
analyseReset();
refreshResultsView();
@@ -2122,8 +2122,6 @@ function handleGesture(e) {
}




document.addEventListener('change', function (e) {
const target = e.target;
const context = target.parentNode.classList.contains('chart') ? 'chart' : 'explore';
@@ -2773,7 +2771,7 @@ function centreSpec(){
if (PREDICTING) {
console.log('Operation aborted');
PREDICTING = false;
disableSettingsListElements(true);
disableSettingsDuringAnalysis(true);
STATE.analysisDone = true;
worker.postMessage({
action: 'abort',
@@ -2886,13 +2884,21 @@ function centreSpec(){
} : undefined;
}

function disableSettingsListElements(bool){
function disableSettingsDuringAnalysis(bool){
DOM.modelToUse.disabled = bool;
DOM.threadSlider.disabled = bool;
DOM.batchSizeSlider.disabled = bool;
DOM.locale.disabled = bool;
DOM.listToUse.disabled = bool;
DOM.customListContainer.disabled = bool;
DOM.localSwitchContainer.disabled = bool
DOM.speciesThreshold.disabled = bool;
DOM.speciesWeek.disabled = bool;
DOM.backendOptions.forEach(backend => backend.disabled = bool)
DOM.contextAware.disabled = bool;
DOM.sendFilteredAudio.disabled = bool;
}

const postBufferUpdate = ({
file = STATE.currentFile,
begin = 0,
@@ -3160,7 +3166,7 @@ function centreSpec(){
*/
function onResultsComplete({active = undefined, select = undefined} = {}){
PREDICTING = false;
disableSettingsListElements(false)
disableSettingsDuringAnalysis(false)
DOM.resultTable.replaceWith(resultsBuffer);
const table = DOM.resultTable;
showElement(['resultTableContainer', 'resultsHead'], false);
@@ -3225,7 +3231,7 @@ function formatDuration(seconds){

function onAnalysisComplete({quiet}){
PREDICTING = false;
disableSettingsListElements(false)
disableSettingsDuringAnalysis(false)
STATE.analysisDone = true;
STATE.diskHasRecords && enableMenuItem(['explore', 'charts']);
DOM.progressDiv.classList.add('invisible');
@@ -3836,6 +3842,10 @@ function formatDuration(seconds){


const toggleContextAwareMode = () => {
if (PREDICTING){
generateToast({message: 'It is not possible to change the context-mode settings while an analysis is underway.', type:'warning'})
return;
}
if (config.model !== 'birdnet') config.detect.contextAware = !config.detect.contextAware;
DOM.contextAware.checked = config.detect.contextAware;
contextAwareIconDisplay();
@@ -4469,12 +4479,7 @@ function playRegion(){
case 'tensorflow':
case 'webgl':
case 'webgpu':{
if (PREDICTING){
generateToast({message: 'It is not possible to change the model backend while an analysis is underway', type:'warning'})
document.getElementById(config[config.model].backend).checked = true;
} else {
handleBackendChange(target);
}
handleBackendChange(target);
break;
}

@@ -4554,7 +4559,7 @@ function playRegion(){
break;
}
case 'audioFiltersIcon': { toggleFilters(); break }
//case 'context-mode': { toggleContextAwareMode(); break }
case 'context-mode': { toggleContextAwareMode(); break }
case 'frequency-range': {
document.getElementById('frequency-range-panel').classList.toggle('d-none');
document.getElementById('frequency-range').classList.toggle('active');
@@ -4681,24 +4686,19 @@ function playRegion(){
config.list === 'custom' || updateList()
break }
case 'locale': {
if (PREDICTING){
generateToast({message: 'It is not possible to change the language while an analysis is underway', type:'warning'})
DOM.locale.value = config[config.model].locale
} else{
let labelFile;
if (element.value === 'custom'){
labelFile = config.customListFile[config.model];
if (! labelFile) {
generateToast({type: 'warning', message: 'You must select a label file in the list settings to use the custom language option.'});
return;
}
} else {
const chirpity = element.value === 'en_uk' && config.model !== 'birdnet' ? 'chirpity' : '';
labelFile = `labels/V2.4/BirdNET_GLOBAL_6K_V2.4_${chirpity}Labels_${element.value}.txt`;
let labelFile;
if (element.value === 'custom'){
labelFile = config.customListFile[config.model];
if (! labelFile) {
generateToast({type: 'warning', message: 'You must select a label file in the list settings to use the custom language option.'});
return;
}
config[config.model].locale = element.value;
readLabels(labelFile, 'locale');
} else {
const chirpity = element.value === 'en_uk' && config.model !== 'birdnet' ? 'chirpity' : '';
labelFile = `labels/V2.4/BirdNET_GLOBAL_6K_V2.4_${chirpity}Labels_${element.value}.txt`;
}
config[config.model].locale = element.value;
readLabels(labelFile, 'locale');
break }
case 'local': {
config.local = element.checked;
Expand All @@ -4716,30 +4716,18 @@ function playRegion(){
handleBackendChange(config[config.model].backend);
setListUIState(config.list)
break }
case 'thread-slider': {
if (PREDICTING){
generateToast({message: 'It is not possible to change the number of threads while an analysis is underway', type:'warning'})
DOM.threadSlider.value = config[config[config.model].backend].threads
} else {
// change number of threads
DOM.numberOfThreads.textContent = DOM.threadSlider.value;
config[config[config.model].backend].threads = DOM.threadSlider.valueAsNumber;
worker.postMessage({action: 'change-threads', threads: DOM.threadSlider.valueAsNumber})
}
case 'thread-slider': {
// change number of threads
DOM.numberOfThreads.textContent = DOM.threadSlider.value;
config[config[config.model].backend].threads = DOM.threadSlider.valueAsNumber;
worker.postMessage({action: 'change-threads', threads: DOM.threadSlider.valueAsNumber})
break }
case 'batch-size': {
if (PREDICTING){
generateToast({message: 'It is not possible to change the batch size while an analysis is underway', type:'warning'})
const batch = config[config[config.model].backend].batchSize;
DOM.batchSizeSlider.value = BATCH_SIZE_LIST.indexOf(batch);
DOM.batchSizeValue.textContent = batch;
} else {
DOM.batchSizeValue.textContent = BATCH_SIZE_LIST[DOM.batchSizeSlider.value].toString();
config[config[config.model].backend].batchSize = BATCH_SIZE_LIST[element.value];
worker.postMessage({action: 'change-batch-size', batchSize: BATCH_SIZE_LIST[element.value]})
// Reset region maxLength
initRegion();
}
DOM.batchSizeValue.textContent = BATCH_SIZE_LIST[DOM.batchSizeSlider.value].toString();
config[config[config.model].backend].batchSize = BATCH_SIZE_LIST[element.value];
worker.postMessage({action: 'change-batch-size', batchSize: BATCH_SIZE_LIST[element.value]})
// Reset region maxLength
initRegion();
break }
case 'colourmap': {
config.colormap = element.value;
10 changes: 2 additions & 8 deletions js/worker.js
@@ -2147,14 +2147,8 @@ const parsePredictions = async (response) => {
UI.postMessage({ event: 'progress', progress: progress, file: file });
if (fileProgress === 1) {
if (index === 0 ) {
const result = `No detections found in ${file}. Searched for records using the ${STATE.list} list and having a minimum confidence of ${STATE.detect.confidence/10}%`
UI.postMessage({
event: 'new-result',
file: file,
result: result,
index: index,
selection: STATE.selection
});
const message = `No detections found in ${file}. Searched for records using the ${STATE.list} list and having a minimum confidence of ${STATE.detect.confidence/10}%`
generateAlert({message: message})
}
updateFilesBeingProcessed(response.file)
DEBUG && console.log(`File ${file} processed after ${(new Date() - predictionStart) / 1000} seconds: ${filesBeingProcessed.length} files to go`);
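
The worker.js hunk above is the change the commit title refers to: a file with no detections no longer injects a text-only row into the results table via a 'new-result' message; the notice is surfaced as an alert instead. A condensed before/after sketch (simplified from the diff; generateAlert is the helper the new code calls):

// Before: the notice was sent to the UI as if it were a detection result.
// UI.postMessage({ event: 'new-result', file: file, result: message, index: index, selection: STATE.selection });

// After: the same text becomes an alert, so the results table only ever holds real detections.
const message = `No detections found in ${file}. Searched for records using the ${STATE.list} list ` +
    `and having a minimum confidence of ${STATE.detect.confidence / 10}%`;
generateAlert({ message });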
