perf(deleteAction): Queue delete requests
When multiple files are deleted at once, all the requests bombard the server
simultaneously, causing performance issues.

This commit adds a queue that limits the concurrency of these requests to 5.

Signed-off-by: fenn-cs <[email protected]>
nfebe authored and susnux committed May 10, 2024
1 parent 5fc46bc commit 36ec764
Showing 1 changed file with 4 additions and 1 deletion.
apps/files/src/actions/deleteAction.ts (5 changes: 4 additions & 1 deletion)
@@ -30,6 +30,7 @@ import NetworkOffSvg from '@mdi/svg/svg/network-off.svg?raw'
 import TrashCanSvg from '@mdi/svg/svg/trash-can.svg?raw'
 
 import logger from '../logger.js'
+import PQueue from 'p-queue'
 
 const canUnshareOnly = (nodes: Node[]) => {
     return nodes.every(node => node.attributes['is-mount-root'] === true
@@ -119,6 +120,8 @@ const displayName = (nodes: Node[], view: View) => {
     return t('files', 'Delete')
 }
 
+const queue = new PQueue({ concurrency: 5 })
+
 export const action = new FileAction({
     id: 'delete',
     displayName,
@@ -183,7 +186,7 @@ export const action = new FileAction({
         return Promise.all(nodes.map(() => false))
     }
 
-    return Promise.all(nodes.map(node => this.exec(node, view, dir)))
+    return Promise.all(nodes.map(node => queue.add(() => this.exec(node, view, dir))))
 },
 
     order: 100,
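A note on the pattern the diff introduces: p-queue defers each added task until one of its concurrency slots is free, so at most five delete requests are in flight at any moment while Promise.all still collects all results. A minimal standalone sketch, assuming a hypothetical deleteNode helper and WebDAV URL (neither is part of this commit):

import PQueue from 'p-queue'

const queue = new PQueue({ concurrency: 5 })

// Hypothetical stand-in for this.exec(node, view, dir): delete a single file over WebDAV.
const deleteNode = async (path: string): Promise<boolean> => {
    const response = await fetch(`/remote.php/dav/files/user${encodeURI(path)}`, { method: 'DELETE' })
    return response.ok
}

const paths = ['/a.txt', '/b.txt', '/c.txt']

// queue.add() wraps each call, so no more than 5 DELETE requests run concurrently;
// Promise.all resolves once every queued deletion has settled.
const results = await Promise.all(paths.map(path => queue.add(() => deleteNode(path))))
console.log(results) // e.g. [true, true, true]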
