diff --git a/index.html b/index.html
index 4c76f73f..4dade85b 100644
--- a/index.html
+++ b/index.html
@@ -501,7 +501,7 @@
Audio Export:
diff --git a/js/state.js b/js/state.js
index 82bdab9a..0c6f62dd 100644
--- a/js/state.js
+++ b/js/state.js
@@ -36,7 +36,7 @@ export class State {
this.UUID = 0,
this.track = true,
this.powerSaveBlocker = false,
- this.archive = {location: undefined, format: 'opus', auto: false},
+ this.archive = {location: undefined, format: 'ogg', auto: false},
this.useGUANO = true
}
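// Illustrative sketch, not part of the patch: how STATE.archive.format is consumed in js/worker.js.
// The value becomes the archived file's extension, and fluent-ffmpeg then lets ffmpeg pick the
// container's default audio encoder (Vorbis for .ogg) unless one is set explicitly.
// archiveOutputName is a hypothetical helper; p is Node's path module as used in the worker.
function archiveOutputName(inputFilePath, archive) {
  // e.g. '/recordings/dawn.wav' with { format: 'ogg' } -> 'dawn.ogg'
  return p.basename(inputFilePath, p.extname(inputFilePath)) + '.' + archive.format;
}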
diff --git a/js/ui.js b/js/ui.js
index 0abf712d..2a50313b 100644
--- a/js/ui.js
+++ b/js/ui.js
@@ -5208,7 +5208,7 @@ async function readLabels(labelFile, updating){
tracking.classList.remove('d-none')
// Update your UI with the progress information
updateProgressBar.value = progressObj.percent;
- if (progressObj.percent > 99.9) tracking.classList.add('d-none')
+ if (progressObj.percent > 99.8) tracking.classList.add('d-none')
}
window.electron.onDownloadProgress((_event, progressObj) => displayProgress(progressObj, 'Downloading the latest update: '));
diff --git a/js/worker.js b/js/worker.js
index 25ce8bc4..1f2cefe7 100644
--- a/js/worker.js
+++ b/js/worker.js
@@ -15,7 +15,7 @@ import { State } from './state.js';
import { sqlite3 } from './database.js';
import {trackEvent} from './tracking.js';
-const DEBUG = false;
+const DEBUG = true;
// Function to join Buffers and not use Buffer.concat() which leads to detached ArrayBuffers
function joinBuffers(buffer1, buffer2) {
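// Sketch of the approach the comment above describes (the real function body lies outside this hunk):
// allocate a fresh Buffer and copy both inputs into it, so the result never shares, and can never
// detach, the callers' underlying ArrayBuffers. joinBuffersSketch is an illustrative stand-in.
function joinBuffersSketch(buffer1, buffer2) {
  const joined = Buffer.allocUnsafe(buffer1.length + buffer2.length);
  buffer1.copy(joined, 0);               // copy the first chunk to the start
  buffer2.copy(joined, buffer1.length);  // append the second chunk after it
  return joined;
}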
@@ -3558,11 +3558,136 @@ async function setIncludedIDs(lat, lon, week) {
///////// Database compression and archive ////
-async function convertAndOrganiseFiles() {
+// async function convertAndOrganiseFiles() {
+// const db = diskDB;
+// let count = 0;
+// let {totalToConvert} = await db.getAsync('SELECT COUNT(*) as totalToConvert from files');
+// const fileProgressMap = {};
+// // Ensure 'archiveName' column exists in the files table
+// await db.runAsync("ALTER TABLE files ADD COLUMN archiveName TEXT")
+// .catch(err => {
+// if (err.message.includes("duplicate column")) {
+// DEBUG && console.log("Column 'archiveName' already exists");
+// } else {
+// console.error("Error adding 'archiveName' column:", err);
+// }
+// });
+// // Query the files table to get the necessary data
+// db.each("SELECT f.id, f.name, f.duration, f.filestart, l.place FROM files f LEFT JOIN locations l ON f.locationID = l.id", async function(err, row) {
+// if (err) {
+// console.error("Error querying the database:", err);
+// return;
+// }
+// row.place ??= STATE.place;
+// // Create the output directory structure based on place and file date
+// const fileDate = new Date(row.filestart);
+// const year = String(fileDate.getFullYear());
+// const month = fileDate.toLocaleString('default', { month: 'long' }); // Get full month name
+// //const day = ''; //String(fileDate.getDate()).padStart(2, '0');
+// const place = row.place?.replace(/[\/\\?%*:|"<>]/g, '_').trim(); // Sanitize the place name
+
+// const inputFilePath = row.name;
+// const outputDir = p.join(place, year, month);
+// const outputFileName = p.basename(inputFilePath, p.extname(inputFilePath)) + '.' + STATE.archive.format;
+// // Check if the file already exists and is complete
+// const {archiveName} = await db.getAsync('SELECT archiveName FROM files WHERE name = ?', inputFilePath);
+// const fullPath = p.join(STATE.archive.location, outputDir)
+// const fullFilePath = p.join(fullPath, outputFileName)
+// const dbArchiveName = p.join(outputDir, outputFileName)
+// if (archiveName === dbArchiveName && fs.existsSync(fullFilePath)) {
+// totalToConvert--;
+// DEBUG && console.log(`File ${inputFilePath} already converted. Skipping conversion.`);
+// return;
+// }
+
+// if (!fs.existsSync(fullPath)) {
+// fs.mkdirSync(fullPath, { recursive: true });
+// }
+
+// // Convert the file using fluent-ffmpeg
+// let command = ffmpeg(inputFilePath)
+// if (STATE.archive.format === 'opus') {
+// command.audioBitrate('128k')
+// .audioChannels(1) // Set to mono
+// .audioFrequency(26_000) // Set sample rate for BirdNET
+// }
+// let scaleFactor = 1; // When ffmpeg reports progress, it does so against the full length of the file
+// if (STATE.detect.nocmig){
+// METADATA[inputFilePath] || await setMetadata({file: inputFilePath});
+// const boundaries = await setStartEnd(inputFilePath);
+// if (boundaries.length > 1) {
+// UI.postMessage({event: 'generate-alert', message: `Multi-day operations are not yet supported: ${inputFilePath} will not be trimmed`});
+// } else {
+// const {start, end} = boundaries[0];
+// if (start === end) return
+// command.seekInput(start).duration(end - start)
+// scaleFactor = row.duration / (end-start);
+// // Now update the duration for the truncated file to ensure accurate mtimes are set
+// row.duration = end - start;
+// }
+
+// }
+// command.output(fullFilePath)
+// .on('end', () => {
+// console.log(`Converted ${inputFilePath} to ${fullFilePath}`);
+// const newfileMtime = new Date(Math.round(row.filestart + (row.duration * 1000)));
+// utimesSync(fullFilePath, {atime: Date.now(), mtime: newfileMtime});
+// // Update the database with the new file path
+// db.run("UPDATE files SET archiveName = ? WHERE id = ?", [dbArchiveName, row.id], (err) => {
+// if (err) {
+// console.error("Error updating the database:", err);
+// } else {
+// console.log(`Updated database for file: ${inputFilePath}`);
+// }
+// count++;
+// UI.postMessage({event: 'generate-alert', message: `Finished conversion for ${inputFilePath}
+// ${count} of ${totalToConvert} completed`})
+// });
+// })
+// .on('error', (err) => {
+// count++;
+// DEBUG && console.error(`Error converting file ${inputFilePath}:`, err);
+// UI.postMessage({event: 'generate-alert', message: `File not found: ${inputFilePath}`, file: inputFilePath})
+// })
+// .on('start', function (commandLine) {
+// DEBUG && console.log('FFmpeg command: ' + commandLine);
+// })
+// .on('progress', (progress) => {
+// if (!isNaN(progress.percent)){
+// // Calculate the cumulative progress
+// fileProgressMap[inputFilePath] = progress.percent * scaleFactor;
+// console.log(`${inputFilePath} progress: ${fileProgressMap[inputFilePath].toFixed(1)}%`)
+// const values = Object.values(fileProgressMap);
+// // Calculate the sum of the values
+// const sum = values.reduce((accumulator, currentValue) => accumulator + currentValue, 0);
+
+// // Calculate the average
+// const average = sum / values.length;
+
+// UI.postMessage({
+// event: `conversion-progress`,
+// progress: { percent: average }, // Use cumulative progress for smooth transition
+// text: `Archive file conversion progress: ${average.toFixed(1)}% `
+// });
+// }
+// })
+// .run();
+// }
+// );
+// }
+
+
+const pLimit = require('p-limit'); // npm install p-limit (v3.x if loaded via require(); later releases are ESM-only)
+
+async function convertAndOrganiseFiles(threadLimit) {
+ threadLimit ??= 1; // Default to one conversion at a time if no limit is given
const db = diskDB;
let count = 0;
let {totalToConvert} = await db.getAsync('SELECT COUNT(*) as totalToConvert from files');
const fileProgressMap = {};
+ const limit = pLimit(threadLimit); // Set the limit based on the number of threads
+ const conversions = []; // Array to hold the conversion promises
+
// Ensure 'archiveName' column exists in the files table
await db.runAsync("ALTER TABLE files ADD COLUMN archiveName TEXT")
.catch(err => {
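// Illustrative sketch, not part of the patch: the p-limit pattern introduced above. pLimit(n)
// returns a wrapper that queues the functions it is given and keeps at most n of them running at
// once, so `conversions` fills with promises that settle as pool slots free up. runWithPool and
// its default of 2 threads are hypothetical.
async function runWithPool(rows, threadLimit = 2) {
  const limit = pLimit(threadLimit);
  // Each limit() call schedules one conversion; only `threadLimit` run concurrently.
  const jobs = rows.map(row => limit(() => convertFile(row.name /* ...remaining arguments... */)));
  return Promise.all(jobs); // settles once every queued conversion has finished
}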
@@ -3571,29 +3696,29 @@ async function convertAndOrganiseFiles() {
} else {
console.error("Error adding 'archiveName' column:", err);
}
- });
+ });
+
// Query the files table to get the necessary data
db.each("SELECT f.id, f.name, f.duration, f.filestart, l.place FROM files f LEFT JOIN locations l ON f.locationID = l.id", async function(err, row) {
if (err) {
console.error("Error querying the database:", err);
return;
}
+
row.place ??= STATE.place;
- // Create the output directory structure based on place and file date
const fileDate = new Date(row.filestart);
const year = String(fileDate.getFullYear());
- const month = fileDate.toLocaleString('default', { month: 'long' }); // Get full month name
- //const day = ''; //String(fileDate.getDate()).padStart(2, '0');
- const place = row.place?.replace(/[\/\\?%*:|"<>]/g, '_').trim(); // Sanitize the place name
+ const month = fileDate.toLocaleString('default', { month: 'long' });
+ const place = row.place?.replace(/[\/\\?%*:|"<>]/g, '_').trim();
const inputFilePath = row.name;
const outputDir = p.join(place, year, month);
const outputFileName = p.basename(inputFilePath, p.extname(inputFilePath)) + '.' + STATE.archive.format;
- // Check if the file already exists, as is complete
const {archiveName} = await db.getAsync('SELECT archiveName FROM files WHERE name = ?', inputFilePath);
- const fullPath = p.join(STATE.archive.location, outputDir)
- const fullFilePath = p.join(fullPath, outputFileName)
- const dbArchiveName = p.join(outputDir, outputFileName)
+ const fullPath = p.join(STATE.archive.location, outputDir);
+ const fullFilePath = p.join(fullPath, outputFileName);
+ const dbArchiveName = p.join(outputDir, outputFileName);
+
if (archiveName === dbArchiveName && fs.existsSync(fullFilePath)) {
totalToConvert--;
DEBUG && console.log(`File ${inputFilePath} already converted. Skipping conversion.`);
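// Worked example with hypothetical values, not part of the patch: the archive layout assembled
// above. The sanitised place name, four-digit year and full month name become nested folders, and
// the archived copy keeps the source file's base name with the configured extension.
const exampleRow = { name: '/recordings/2024-06-15_2130.wav', filestart: Date.parse('2024-06-15T21:30:00'), place: 'Walthamstow Marshes' };
const exampleDate = new Date(exampleRow.filestart);
const exampleDir = p.join(
  exampleRow.place.replace(/[\/\\?%*:|"<>]/g, '_').trim(),    // 'Walthamstow Marshes'
  String(exampleDate.getFullYear()),                          // '2024'
  exampleDate.toLocaleString('default', { month: 'long' })    // 'June' under an English locale
);
const exampleArchiveName = p.join(exampleDir, p.basename(exampleRow.name, p.extname(exampleRow.name)) + '.ogg');
// => 'Walthamstow Marshes/2024/June/2024-06-15_2130.ogg' (POSIX separators shown)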
@@ -3603,75 +3728,76 @@ async function convertAndOrganiseFiles() {
if (!fs.existsSync(fullPath)) {
fs.mkdirSync(fullPath, { recursive: true });
}
-
- // Convert the file using fluent-ffmpeg
- let command = ffmpeg(inputFilePath)
- if (STATE.archive.format === 'opus') {
- command.audioBitrate('128k')
+
+ // Add the file conversion to the pool
+ fileProgressMap[inputFilePath] = 0;
+ conversions.push(limit(() => convertFile(inputFilePath, fullFilePath, row, db, dbArchiveName, fileProgressMap, totalToConvert, ++count))); // pre-increment so the completion message counts from 1
+ });
+
+ // Wait for all conversions to finish
+ await Promise.all(conversions);
+}
+
+async function convertFile(inputFilePath, fullFilePath, row, db, dbArchiveName, fileProgressMap, totalToConvert, count) {
+ METADATA[inputFilePath] || await setMetadata({file: inputFilePath});
+ const boundaries = await setStartEnd(inputFilePath);
+
+ return new Promise((resolve, reject) => {
+ let command = ffmpeg(inputFilePath);
+
+ if (STATE.archive.format === 'ogg') {
+ command.audioBitrate('128k')
.audioChannels(1) // Set to mono
.audioFrequency(26_000) // Set sample rate for BirdNET
+ }
+
+ let scaleFactor = 1;
+ if (STATE.detect.nocmig) {
+ if (boundaries.length > 1) {
+ UI.postMessage({event: 'generate-alert', message: `Multi-day operations are not yet supported: ${inputFilePath} will not be trimmed`});
+ } else {
+ const {start, end} = boundaries[0];
+ if (start === end) return resolve(); // nothing to archive; settle the promise so Promise.all() can complete
+ command.seekInput(start).duration(end - start);
+ scaleFactor = row.duration / (end-start);
+ row.duration = end - start;
}
- let scaleFactor = 1; // When ffmpeg reports progress, it does so against the full length of the file
- if (STATE.detect.nocmig){
- METADATA[inputFilePath] || await setMetadata({file: inputFilePath});
- const boundaries = await setStartEnd(inputFilePath);
- if (boundaries.length > 1) {
- UI.postMessage({event: 'generate-alert', message: `Multi-day operations are not yet supported: ${inputFilePath} will not be trimmed`});
- } else {
- const {start, end} = boundaries[0];
- if (start === end) return
- command.seekInput(start).duration(end - start)
- scaleFactor = row.duration / (end-start);
- // Now update the duration for the truncated file to ensure accurate mtimes are set
- row.duration = end - start;
- }
-
- }
- command.output(fullFilePath)
+ }
+ command.output(fullFilePath)
+ .on('start', function (commandLine) {
+ DEBUG && console.log('FFmpeg command: ' + commandLine);
+ })
.on('end', () => {
- console.log(`Converted ${inputFilePath} to ${fullFilePath}`);
const newfileMtime = new Date(Math.round(row.filestart + (row.duration * 1000)));
utimesSync(fullFilePath, {atime: Date.now(), mtime: newfileMtime});
- // Update the database with the new file path
+
db.run("UPDATE files SET archiveName = ? WHERE id = ?", [dbArchiveName, row.id], (err) => {
if (err) {
- console.error("Error updating the database:", err);
+ console.error("Error updating the database:", err);
} else {
- console.log(`Updated database for file: ${inputFilePath}`);
+ UI.postMessage({event: 'generate-alert', message: `Finished conversion for ${inputFilePath}
${count} of ${totalToConvert} completed`});
}
- count++;
- UI.postMessage({event: 'generate-alert', message: `Finished conversion for ${inputFilePath}
- ${count} of ${totalToConvert} completed`})
+ resolve();
});
})
.on('error', (err) => {
- count++;
DEBUG && console.error(`Error converting file ${inputFilePath}:`, err);
- UI.postMessage({event: 'generate-alert', message: `File not found: ${inputFilePath}`, file: inputFilePath})
- })
- .on('start', function (commandLine) {
- DEBUG && console.log('FFmpeg command: ' + commandLine);
+ UI.postMessage({event: 'generate-alert', message: `File not found: ${inputFilePath}`, file: inputFilePath});
+ reject(err);
})
.on('progress', (progress) => {
- if (!isNaN(progress.percent)){
- // Calculate the cumulative progress
+ if (!isNaN(progress.percent)) {
fileProgressMap[inputFilePath] = progress.percent * scaleFactor;
- console.log(`${inputFilePath} progress: ${fileProgressMap[inputFilePath].toFixed(1)}%`)
const values = Object.values(fileProgressMap);
- // Calculate the sum of the values
const sum = values.reduce((accumulator, currentValue) => accumulator + currentValue, 0);
-
- // Calculate the average
const average = sum / values.length;
-
UI.postMessage({
event: `conversion-progress`,
- progress: { percent: average }, // Use cumulative progress for smooth transition
- text: `Archive file conversion progress: ${average.toFixed(1)}% `
+ progress: { percent: average },
+ text: `Archive file conversion progress: ${average.toFixed(1)}%`
});
}
})
.run();
- }
- );
+ });
}
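// Minimal usage sketch, not part of the patch, of the bounded-concurrency flow introduced above.
// It assumes node-sqlite3, whose each() accepts an optional completion callback as its final
// argument; wrapping row enumeration in a promise guarantees the conversion pool is fully
// populated before it is awaited. queueConversion is a hypothetical stand-in for the row handler.
async function archiveSketch(threadLimit = 2) {
  const limit = pLimit(threadLimit);
  const conversions = [];
  await new Promise((resolve, reject) => {
    diskDB.each(
      "SELECT f.id, f.name, f.duration, f.filestart, l.place FROM files f LEFT JOIN locations l ON f.locationID = l.id",
      (err, row) => err ? console.error(err) : conversions.push(limit(() => queueConversion(row))),
      (err) => err ? reject(err) : resolve() // fires after every row callback has been dispatched
    );
  });
  await Promise.all(conversions); // resolves once the last queued conversion finishes
}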