diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9475b817d3..9348929ee7 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -166,7 +166,6 @@ jobs: "tests/end_to_end/candid_rpc/class_syntax/simple_erc20", "tests/end_to_end/candid_rpc/class_syntax/simple_user_accounts", "tests/end_to_end/candid_rpc/class_syntax/stable_b_tree_map_instruction_threshold", - "tests/end_to_end/candid_rpc/class_syntax/stable_memory", "tests/end_to_end/candid_rpc/class_syntax/stable_structures", "tests/end_to_end/candid_rpc/class_syntax/timers", "tests/end_to_end/candid_rpc/class_syntax/tuple_types", @@ -232,7 +231,6 @@ jobs: "tests/end_to_end/candid_rpc/functional_syntax/simple_erc20", "tests/end_to_end/candid_rpc/functional_syntax/simple_user_accounts", "tests/end_to_end/candid_rpc/functional_syntax/stable_b_tree_map_instruction_threshold", - "tests/end_to_end/candid_rpc/functional_syntax/stable_memory", "tests/end_to_end/candid_rpc/functional_syntax/stable_structures", "tests/end_to_end/candid_rpc/functional_syntax/timers", "tests/end_to_end/candid_rpc/functional_syntax/tuple_types", diff --git a/Cargo.lock b/Cargo.lock index 3449f0f93b..37b1fced8e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -218,8 +218,8 @@ dependencies = [ "anyhow", "candid", "candid_parser", - "ic-cdk", - "ic-cdk-macros", + "ic-cdk 0.12.1", + "ic-cdk-macros 0.8.4", "ic-cdk-timers", "ic-stable-structures", "ic-wasi-polyfill", @@ -689,7 +689,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f3d204af0b11c45715169c997858edb58fa8407d08f4fae78a6b415dd39a362" dependencies = [ "candid", - "ic-cdk-macros", + "ic-cdk-macros 0.8.4", + "ic0", + "serde", + "serde_bytes", +] + +[[package]] +name = "ic-cdk" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8859bc2b863a77750acf199e1fb7e3fc403e1b475855ba13f59cb4e4036d238" +dependencies = [ + "candid", + "ic-cdk-macros 0.13.2", "ic0", "serde", "serde_bytes", @@ -709,6 +722,20 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ic-cdk-macros" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a45800053d80a6df839a71aaea5797e723188c0b992618208ca3b941350c7355" +dependencies = [ + "candid", + "proc-macro2", + "quote", + "serde", + "serde_tokenstream", + "syn 1.0.109", +] + [[package]] name = "ic-cdk-timers" version = "0.6.0" @@ -716,7 +743,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c43b9706fef3ad10c4192a14801d16bd9539068239f0f06f257857441364329" dependencies = [ "futures", - "ic-cdk", + "ic-cdk 0.12.1", "ic0", "serde", "serde_bytes", @@ -725,20 +752,21 @@ dependencies = [ [[package]] name = "ic-stable-structures" -version = "0.6.2" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "774d7d26420c095f2b5f0f71f7b2ff4a5b58b87e0959dccc78b3d513a7db5112" +checksum = "03f3044466a69802de74e710dc0300b706a05696a0531c942ca856751a13b0db" dependencies = [ "ic_principal", ] [[package]] name = "ic-wasi-polyfill" -version = "0.4.0" -source = "git+https://github.com/wasm-forge/ic-wasi-polyfill?rev=88bddc8190caf93a1e052f0513b5d6bc074929c3#88bddc8190caf93a1e052f0513b5d6bc074929c3" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb0693d039ca70039186dbe8c15a280e3f363431549ea7c25c6cf855280130d" dependencies = [ "function_name", - "ic-cdk", + "ic-cdk 0.13.2", "ic-stable-structures", "rand", "stable-fs", @@ -1034,8 +1062,8 @@ version = "0.0.0" dependencies = [ "anyhow", "candid", - "ic-cdk", - "ic-cdk-macros", + "ic-cdk 0.12.1", + "ic-cdk-macros 0.8.4", "ic-cdk-timers", "ic-stable-structures", "proptest", @@ -1514,13 +1542,13 @@ checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "stable-fs" -version = "0.3.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b5c2803cdd539ff47bb6265fab36e26949996c22ce4121fc338d3990b734ab4" +checksum = "fff27ea5e6ce61679ebc4394ec618270478e2d3165bc737123e4d011c651012e" dependencies = [ "bitflags 2.4.0", "ciborium", - "ic-cdk", + "ic-cdk 0.13.2", "ic-stable-structures", "serde", "serde_bytes", diff --git a/global_dependencies b/global_dependencies new file mode 100644 index 0000000000..84293fce4d --- /dev/null +++ b/global_dependencies @@ -0,0 +1,6 @@ +wasi2ic version: wasi2ic v0.2.0 (https://github.com/wasm-forge/wasi2ic?rev=806c3558aad24224852a9582f018178402cb3679#806c3558): + wasi2ic + +node version: v20.11.0 + +rustc version: rustc 1.78.0 (9b00956e5 2024-04-29) diff --git a/scripts/file_generator/index.ts b/scripts/file_generator/index.ts index 92c21c846f..a042b6a9af 100644 --- a/scripts/file_generator/index.ts +++ b/scripts/file_generator/index.ts @@ -45,7 +45,7 @@ async function fillFileWithRandomBytes( for (let i = 0; i < totalChunks; i++) { const remainingBytes = sizeInBytes - i * defaultChunkSize; const chunkSize = Math.min(remainingBytes, defaultChunkSize); - const randomBytes = await createRandomBytes(chunkSize); + const randomBytes = createRandomBytes(chunkSize); await appendFile(path, randomBytes); } } diff --git a/scripts/jest_isolate_modules.js b/scripts/jest_isolate_modules.js index 3371e18323..76ff5922b3 100644 --- a/scripts/jest_isolate_modules.js +++ b/scripts/jest_isolate_modules.js @@ -26,5 +26,5 @@ fs.writeFile(jestConfigPath, newJestConfigContent, 'utf8', (err) => { return; } - console.log('jest.config.js successfully overwritten.'); + console.info('jest.config.js successfully overwritten.'); }); diff --git a/src/compiler/file_uploader/incomplete_files.ts b/src/compiler/file_uploader/incomplete_files.ts index 6693141dc0..7a98ebae16 100644 --- a/src/compiler/file_uploader/incomplete_files.ts +++ b/src/compiler/file_uploader/incomplete_files.ts @@ -17,7 +17,7 @@ async function hasValidHash( actor: UploaderActor ): Promise { try { - const hashOption = await actor.get_file_hash(path); + const hashOption = await actor._azle_get_file_hash(path); return hashOption.length === 1; } catch { return false; diff --git a/src/compiler/file_uploader/index.ts b/src/compiler/file_uploader/index.ts index 848ec8468f..c0b4f78759 100644 --- a/src/compiler/file_uploader/index.ts +++ b/src/compiler/file_uploader/index.ts @@ -12,7 +12,10 @@ export async function uploadFiles( canisterName: string, paths: [Src, Dest][] ): Promise { - if (paths.length === 0) { + if ( + paths.length === 0 || + process.env.AZLE_DISABLE_AUTO_FILE_UPLOAD === 'true' + ) { return; } @@ -30,7 +33,7 @@ export async function uploadFiles( } console.info( - 'Finished uploading files. Waiting for file hashing to finish...' + 'Finished uploading files. Waiting for all chunks to finish uploading...' // TODO remove after https://github.com/demergent-labs/azle/issues/1996 is complete ); onBeforeExit(expandedPaths, actor); diff --git a/src/compiler/file_uploader/on_before_exit.ts b/src/compiler/file_uploader/on_before_exit.ts index e0ce3090f3..5109335b17 100644 --- a/src/compiler/file_uploader/on_before_exit.ts +++ b/src/compiler/file_uploader/on_before_exit.ts @@ -1,12 +1,9 @@ import { Dest, Src } from '.'; import { getListOfIncompleteFiles } from './incomplete_files'; -import { getOngoingHashingJobs, OngoingHashingJob } from './ongoing_hashes'; import { UploaderActor } from './uploader_actor'; export function onBeforeExit(paths: [Src, Dest][], actor: UploaderActor): void { - let hashingComplete = false; let cleanUpComplete = false; - let ongoingHashingJobs: OngoingHashingJob[] = []; process.on('beforeExit', async () => { if (cleanUpComplete) { @@ -17,26 +14,10 @@ export function onBeforeExit(paths: [Src, Dest][], actor: UploaderActor): void { return; } - if (hashingComplete) { - await cleanup(paths, actor); - cleanUpComplete = true; - return; - } - - ongoingHashingJobs = await getOngoingHashingJobs( - paths, - ongoingHashingJobs, - actor - ); - - hashingComplete = ongoingHashingJobs.length === 0; - - if (!hashingComplete) { - console.info( - `Waiting 5 seconds before checking hashing status again...` - ); - await new Promise((resolve) => setTimeout(resolve, 5000)); - } + console.info('Cleaning up incomplete files'); + await cleanup(paths, actor); + cleanUpComplete = true; + return; }); } @@ -46,6 +27,6 @@ async function cleanup( ): Promise { const incompleteFiles = await getListOfIncompleteFiles(paths, actor); for (const [_, path] of incompleteFiles) { - await actor.clear_file_and_info(path); + await actor._azle_clear_file_and_info(path); } } diff --git a/src/compiler/file_uploader/ongoing_hashes.ts b/src/compiler/file_uploader/ongoing_hashes.ts deleted file mode 100644 index 77cd0c9eac..0000000000 --- a/src/compiler/file_uploader/ongoing_hashes.ts +++ /dev/null @@ -1,166 +0,0 @@ -import { Dest, Src } from '.'; -import { getListOfIncompleteFiles } from './incomplete_files'; -import { UploaderActor } from './uploader_actor'; - -export type OngoingHashingJob = { - path: string; - bytesHashed: bigint; - totalBytes: bigint; - triesSinceLastChange: number; -}; - -type AmountComplete = bigint; -type Total = bigint; -type HashStatus = [AmountComplete, Total]; -type HashStatuses = { [path: string]: HashStatus }; - -export async function getOngoingHashingJobs( - paths: [Src, Dest][], - previouslyOngoingJobs: OngoingHashingJob[], - actor: UploaderActor -): Promise { - const incompleteFiles = await getListOfIncompleteFiles(paths, actor); - - if (incompleteFiles.length === 0) { - return []; - } - - const incompleteDestPaths = incompleteFiles.map( - ([_, destPath]) => destPath - ); - - const incompleteHashingJobs = await updateOngoingHashingJobs( - previouslyOngoingJobs, - incompleteDestPaths, - actor - ); - - const isHashingStalled = incompleteHashingJobs.every( - // If the hash hasn't progressed in the last 5 tries it is considered failed instead of ongoing - (fileInfo) => fileInfo.triesSinceLastChange >= 5 - ); - - if (isHashingStalled) { - // At the point jobs are considered incomplete instead of ongoing. There - // are no ongoing jobs to report - reportIncompleteHashingJobs(incompleteFiles); - return []; - } - - reportOngoingHashingJobs(incompleteHashingJobs); - - return incompleteHashingJobs; -} - -async function updateOngoingHashingJobs( - previouslyOngoingJobs: OngoingHashingJob[], - incompletePaths: string[], - actor: UploaderActor -): Promise { - const latestHashStatuses = await getHashStatuses(incompletePaths, actor); - const previouslyOngoingHashingJobs = initializePreviousJobsIfNeeded( - previouslyOngoingJobs, - latestHashStatuses, - incompletePaths - ); - - return previouslyOngoingHashingJobs.map((hashInfo): OngoingHashingJob => { - const newBytesHashed = latestHashStatuses[hashInfo.path][0]; - if (hashInfo.bytesHashed === newBytesHashed) { - return incrementTries(hashInfo); - } else { - return updateBytes(hashInfo, newBytesHashed); - } - }); -} - -function incrementTries(hashJob: OngoingHashingJob): OngoingHashingJob { - return { - ...hashJob, - triesSinceLastChange: hashJob.triesSinceLastChange + 1 - }; -} - -function updateBytes( - hashJob: OngoingHashingJob, - newBytesHashed: bigint -): OngoingHashingJob { - return { - ...hashJob, - bytesHashed: newBytesHashed, - triesSinceLastChange: 0 - }; -} - -function reportOngoingHashingJobs(ongoingHashInfo: OngoingHashingJob[]): void { - for (const hashInfo of ongoingHashInfo) { - const percent = - (Number(hashInfo.bytesHashed) / Number(hashInfo.totalBytes)) * 100; - console.info(`${hashInfo.path} at ${percent.toFixed(2)}% hashed`); - } - console.info(); -} - -function reportIncompleteHashingJobs(paths: [Src, Dest][]): void { - console.info( - `Missing hashes for ${paths.length} files:\n${paths.join('\n')}.` - ); -} - -function initializePreviousJobsIfNeeded( - previousHashInfos: OngoingHashingJob[], - hashStatuses: HashStatuses, - incompletePaths: string[] -): OngoingHashingJob[] { - return previousHashInfos.length > 0 - ? previousHashInfos.filter((hashInfo) => - incompletePaths.includes(hashInfo.path) - ) - : incompletePaths.map((path): OngoingHashingJob => { - return { - path, - triesSinceLastChange: 0, - bytesHashed: hashStatuses[path][0], - totalBytes: hashStatuses[path][1] - }; - }); -} - -async function getHashStatuses( - incompleteFiles: string[], - actor: UploaderActor -): Promise { - return await incompleteFiles.reduce( - async ( - accPromise: Promise, - path - ): Promise => { - const acc = await accPromise; - return { ...acc, [path]: await getHashStatus(path, actor) }; - }, - Promise.resolve({}) - ); -} - -/** - * Returns a tuple with the amount complete at index 0 and the total files size - * at index 1 - * - * This is only meant to be used for status updates. The true test for if a hash - * is complete is to get the hash. - * - * @param canisterId The ID of the canister containing the file - * @param path The path to the file - * @returns a tuple with the amount complete and the total file size - */ -async function getHashStatus( - path: string, - actor: UploaderActor -): Promise<[AmountComplete, Total]> { - const result = await actor.get_hash_status(path); - if (result.length === 0) { - // Files doesn't exist - return [0n, 0n]; - } - return result[0]; -} diff --git a/src/compiler/file_uploader/upload_file.ts b/src/compiler/file_uploader/upload_file.ts index 9328dabdf0..c285974728 100644 --- a/src/compiler/file_uploader/upload_file.ts +++ b/src/compiler/file_uploader/upload_file.ts @@ -41,7 +41,7 @@ export async function uploadFile( // Don't await here! Awaiting the agent will result in about a 4x increase in upload time. // The above throttling is sufficient to manage the speed of uploads actor - .upload_file_chunk( + ._azle_upload_file_chunk( destPath, uploadStartTime, BigInt(startIndex), @@ -81,7 +81,7 @@ async function shouldBeUploaded( actor: UploaderActor ): Promise { const localHash = (await hashFile(srcPath)).toString('hex'); - const canisterHashOption = await actor.get_file_hash(destPath); + const canisterHashOption = await actor._azle_get_file_hash(destPath); if (canisterHashOption.length === 0) { return true; } diff --git a/src/compiler/file_uploader/uploader_actor.ts b/src/compiler/file_uploader/uploader_actor.ts index 8f4e60194d..cf6d7cc43f 100644 --- a/src/compiler/file_uploader/uploader_actor.ts +++ b/src/compiler/file_uploader/uploader_actor.ts @@ -13,14 +13,13 @@ export async function createActor( return Actor.createActor<_SERVICE>( ({ IDL }) => { return IDL.Service({ - clear_file_and_info: IDL.Func([IDL.Text], [], []), - get_file_hash: IDL.Func([IDL.Text], [IDL.Opt(IDL.Text)], []), - get_hash_status: IDL.Func( + _azle_clear_file_and_info: IDL.Func([IDL.Text], [], []), + _azle_get_file_hash: IDL.Func( [IDL.Text], - [IDL.Opt(IDL.Tuple(IDL.Nat64, IDL.Nat64))], + [IDL.Opt(IDL.Text)], [] ), - upload_file_chunk: IDL.Func( + _azle_upload_file_chunk: IDL.Func( [ IDL.Text, IDL.Nat64, @@ -41,10 +40,9 @@ export async function createActor( } interface _SERVICE { - clear_file_and_info: ActorMethod<[string], void>; - get_file_hash: ActorMethod<[string], [] | [string]>; - get_hash_status: ActorMethod<[string], [] | [[bigint, bigint]]>; - upload_file_chunk: ActorMethod< + _azle_clear_file_and_info: ActorMethod<[string], void>; + _azle_get_file_hash: ActorMethod<[string], [] | [string]>; + _azle_upload_file_chunk: ActorMethod< [string, bigint, bigint, Uint8Array, bigint], void >; diff --git a/src/compiler/generate_wasm_binary.ts b/src/compiler/generate_wasm_binary.ts index 70971b26c5..6ec97e7cbd 100644 --- a/src/compiler/generate_wasm_binary.ts +++ b/src/compiler/generate_wasm_binary.ts @@ -1,5 +1,6 @@ import { IOType } from 'child_process'; +import { logGlobalDependencies } from './log_global_dependencies'; import { manipulateWasmBinary } from './manipulate_wasm_binary'; import { prepareRustStagingArea } from './prepare_rust_staging_area'; import { execSyncPretty } from './utils/exec_sync_pretty'; @@ -15,6 +16,8 @@ export async function generateWasmBinary( canisterPath: string ): Promise { if (process.env.AZLE_GEN_WASM === 'true') { + await logGlobalDependencies(); + await prepareRustStagingArea(canisterConfig, canisterPath); compileRustCodeNatively( diff --git a/src/compiler/log_global_dependencies.ts b/src/compiler/log_global_dependencies.ts new file mode 100644 index 0000000000..dc7a498863 --- /dev/null +++ b/src/compiler/log_global_dependencies.ts @@ -0,0 +1,23 @@ +import { writeFile } from 'fs/promises'; +import { join } from 'path'; + +import { execSyncPretty } from './utils/exec_sync_pretty'; +import { AZLE_PACKAGE_PATH } from './utils/global_paths'; + +export async function logGlobalDependencies(): Promise { + const wasiVersion = execSyncPretty('cargo install --list | grep wasi2ic'); + const nodeVersion = execSyncPretty('node --version'); + const rustVersion = execSyncPretty('rustc --version'); + + const globalDependencies = + `wasi2ic version: ${wasiVersion}` + + `\n` + + `node version: ${nodeVersion}` + + `\n` + + `rustc version: ${rustVersion}`; + + await writeFile( + join(AZLE_PACKAGE_PATH, 'global_dependencies'), + globalDependencies + ); +} diff --git a/src/compiler/rust/canister/Cargo.toml b/src/compiler/rust/canister/Cargo.toml index 8fa6bf6421..22b8806d0c 100644 --- a/src/compiler/rust/canister/Cargo.toml +++ b/src/compiler/rust/canister/Cargo.toml @@ -13,7 +13,7 @@ ic-cdk-macros = "0.8.4" ic-cdk-timers = "0.6.0" candid = "0.10.2" candid_parser = "0.1.2" -ic-stable-structures = "0.6.2" +ic-stable-structures = "0.6.5" open_value_sharing = { path = "../open_value_sharing" } slotmap = "=1.0.6" wasmi = "0.31.2" @@ -21,11 +21,7 @@ sha2 = "0.10.8" serde = "1.0.202" serde_json = "1.0.107" -# TODO transient feature can be removed once https://github.com/demergent-labs/azle/issues/1731 is resolved -# ic-wasi-polyfill = { git = "https://github.com/wasm-forge/ic-wasi-polyfill", rev = "88bddc8190caf93a1e052f0513b5d6bc074929c3" } -ic-wasi-polyfill = { git = "https://github.com/wasm-forge/ic-wasi-polyfill", rev = "88bddc8190caf93a1e052f0513b5d6bc074929c3", features = [ - "transient", -] } +ic-wasi-polyfill = "0.5.0" wasmedge_quickjs = { git = "https://github.com/demergent-labs/wasmedge-quickjs", rev = "c21ff69f442998e4cda4619166e23a9bc91418be" } # wasmedge_quickjs = { path = "/home/wasmedge-quickjs" } diff --git a/src/compiler/rust/canister/src/chunk.rs b/src/compiler/rust/canister/src/chunk.rs new file mode 100644 index 0000000000..e54c48cf3d --- /dev/null +++ b/src/compiler/rust/canister/src/chunk.rs @@ -0,0 +1,6 @@ +pub async fn chunk() { + let id = ic_cdk::id(); + let method = "_azle_chunk"; + let args_raw = [68, 73, 68, 76, 0, 0]; // '()' pre encoded + let _ = ic_cdk::api::call::call_raw128(id, method, args_raw, 0).await; +} diff --git a/src/compiler/rust/canister/src/guards.rs b/src/compiler/rust/canister/src/guards.rs new file mode 100644 index 0000000000..7649f2239e --- /dev/null +++ b/src/compiler/rust/canister/src/guards.rs @@ -0,0 +1,8 @@ +pub fn guard_against_non_controllers() -> Result<(), String> { + if ic_cdk::api::is_controller(&ic_cdk::api::caller()) { + return Ok(()); + } + return Err( + "Not Authorized: only controllers of this canister may call this method".to_string(), + ); +} diff --git a/src/compiler/rust/canister/src/ic/accept_message.rs b/src/compiler/rust/canister/src/ic/accept_message.rs index 140e008629..04eca52ec8 100644 --- a/src/compiler/rust/canister/src/ic/accept_message.rs +++ b/src/compiler/rust/canister/src/ic/accept_message.rs @@ -2,7 +2,7 @@ use wasmedge_quickjs::{Context, JsFn, JsValue}; pub struct NativeFunction; impl JsFn for NativeFunction { - fn call(context: &mut Context, this_val: JsValue, argv: &[JsValue]) -> JsValue { + fn call(_context: &mut Context, _this_val: JsValue, _argv: &[JsValue]) -> JsValue { ic_cdk::api::call::accept_message(); JsValue::UnDefined diff --git a/src/compiler/rust/canister/src/ic/arg_data_raw.rs b/src/compiler/rust/canister/src/ic/arg_data_raw.rs index d7e26fe9f0..180afed03f 100644 --- a/src/compiler/rust/canister/src/ic/arg_data_raw.rs +++ b/src/compiler/rust/canister/src/ic/arg_data_raw.rs @@ -2,7 +2,7 @@ use wasmedge_quickjs::{Context, JsFn, JsValue}; pub struct NativeFunction; impl JsFn for NativeFunction { - fn call(context: &mut Context, this_val: JsValue, argv: &[JsValue]) -> JsValue { + fn call(context: &mut Context, _this_val: JsValue, argv: &[JsValue]) -> JsValue { context .new_array_buffer(&ic_cdk::api::call::arg_data_raw()) .into() diff --git a/src/compiler/rust/canister/src/ic/notify_raw.rs b/src/compiler/rust/canister/src/ic/notify_raw.rs index 00d10c5081..bab56b19b5 100644 --- a/src/compiler/rust/canister/src/ic/notify_raw.rs +++ b/src/compiler/rust/canister/src/ic/notify_raw.rs @@ -46,7 +46,7 @@ impl JsFn for NativeFunction { // Err(anyhow::anyhow!(err_string)) - panic!(err_string); + panic!("{}", err_string); } } } diff --git a/src/compiler/rust/canister/src/lib.rs b/src/compiler/rust/canister/src/lib.rs index 7bddc36fd5..ad54d40e07 100644 --- a/src/compiler/rust/canister/src/lib.rs +++ b/src/compiler/rust/canister/src/lib.rs @@ -1,4 +1,9 @@ -use std::{cell::RefCell, collections::BTreeMap, collections::HashMap, convert::TryInto}; +use std::{ + cell::RefCell, + collections::{BTreeMap, HashMap}, + convert::TryInto, + env::args, +}; use ic_stable_structures::{ memory_manager::{MemoryId, MemoryManager, VirtualMemory}, @@ -10,9 +15,15 @@ use serde::{Deserialize, Serialize}; use std::fs; use wasmedge_quickjs::AsObject; +mod chunk; +mod guards; mod ic; +mod upload_file; mod web_assembly; +use guards::guard_against_non_controllers; +use upload_file::Timestamp; + #[allow(unused)] type Memory = VirtualMemory; #[allow(unused)] @@ -57,7 +68,6 @@ impl Storable for AzleStableBTreeMapValue { } type Hash = Option>; -type Timestamp = u64; type BytesReceived = u64; type BytesHashed = u64; @@ -199,7 +209,7 @@ pub fn get_candid_pointer() -> *mut std::os::raw::c_char { fn run_event_loop(context: &mut wasmedge_quickjs::Context) { context.promise_loop_poll(); - while (true) { + loop { let num_tasks = context.event_loop().unwrap().run_tick_task(); context.promise_loop_poll(); @@ -322,6 +332,8 @@ pub fn init(function_index: i32, pass_arg_data: i32) { ic_cdk::spawn(async move { open_value_sharing::init(&wasm_data.consumer).await; }); + + upload_file::init_hashes().unwrap(); } #[no_mangle] @@ -467,331 +479,29 @@ fn reload_js( } #[ic_cdk_macros::update(guard = guard_against_non_controllers)] -pub fn upload_file_chunk( +pub async fn _azle_upload_file_chunk( dest_path: String, timestamp: u64, start_index: u64, file_bytes: Vec, total_file_len: u64, ) { - let is_latest_version = check_if_latest_version(&dest_path, timestamp); - - if !is_latest_version { - return; - } - - let uploaded_file_len = - write_chunk(&dest_path, file_bytes, start_index, total_file_len).unwrap(); - - let percentage_complete = uploaded_file_len as f64 / total_file_len.max(1) as f64 * 100.0; - ic_cdk::println!( - "Received chunk: {} | {}/{} : {:.2}%", + upload_file::upload_file_chunk( dest_path, - bytes_to_human_readable(uploaded_file_len), - bytes_to_human_readable(total_file_len), - percentage_complete - ); - - if uploaded_file_len == total_file_len { - start_hash(dest_path) - } -} - -pub fn guard_against_non_controllers() -> Result<(), String> { - if ic_cdk::api::is_controller(&ic_cdk::api::caller()) { - return Ok(()); - } - return Err( - "Not Authorized: only controllers of this canister may call this method".to_string(), - ); -} - -pub fn start_hash(dest_path: String) { - let delay = core::time::Duration::new(0, 0); - let hash_closure = || hash_file(dest_path); - ic_cdk_timers::set_timer(delay, hash_closure); -} - -pub fn bytes_to_human_readable(size_in_bytes: u64) -> String { - let suffixes = ["B", "KiB", "MiB", "GiB"]; - let size = size_in_bytes as f64; - - let result = suffixes.iter().fold( - (size, suffixes[0], false), - |(remaining_size, selected_suffix, done), suffix| { - if done { - return (remaining_size, selected_suffix, done); - } - if remaining_size < 1024.0 { - (remaining_size, suffix, true) - } else { - (remaining_size / 1024.0, suffix, false) - } - }, - ); - - format!("{:.2} {}", result.0, result.1) -} - -// Adds the given file_bytes to the chunked file at the chunk number position. -// Returns the new total length of chunked file after the addition -pub fn write_chunk( - path: &str, - file_bytes: Vec, - start_index: u64, - file_len: u64, -) -> std::io::Result { - match std::path::Path::new(path).parent() { - Some(dir_path) => std::fs::create_dir_all(dir_path)?, - None => (), //Dir doesn't need to be created - }; - - let mut file: std::fs::File = match std::fs::OpenOptions::new().write(true).open(path) { - Ok(file) => file, - Err(_) => init_file(path, file_len)?, - }; - - std::io::Seek::seek(&mut file, std::io::SeekFrom::Start(start_index))?; - std::io::Write::write_all(&mut file, &file_bytes)?; - drop(file); - - Ok(set_bytes_received(path, file_bytes.len())) -} - -fn init_file(path: &str, file_len: u64) -> std::io::Result { - let new_file = std::fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&path)?; - new_file.set_len(file_len)?; - Ok(new_file) -} - -fn set_bytes_received(dest_path: &str, bytes_in_chunk: usize) -> u64 { - FILE_INFO.with(|total_bytes_received| { - let mut total_bytes_received_mut = total_bytes_received.borrow_mut(); - match total_bytes_received_mut.get_mut(dest_path) { - Some((_, total_bytes, _, _)) => { - *total_bytes += bytes_in_chunk as u64; - *total_bytes - } - None => panic!("Couldn't find file info for {}", dest_path), - } - }) -} - -pub fn check_if_latest_version(dest_path: &str, current_timestamp: Timestamp) -> bool { - let last_recorded_timestamp = get_timestamp(dest_path); - - if current_timestamp < last_recorded_timestamp { - // The request is from an earlier upload attempt. Disregard - return false; - } - - if current_timestamp > last_recorded_timestamp { - // The request is from a newer upload attempt. Clean up the previous attempt. - reset_for_new_upload(dest_path, current_timestamp).unwrap(); - } - - true -} - -fn get_timestamp(path: &str) -> Timestamp { - FILE_INFO.with(|uploaded_file_timestamps_map| { - match uploaded_file_timestamps_map.borrow().get(path) { - Some((timestamp, _, _, _)) => timestamp.clone(), - None => 0, - } - }) + timestamp, + start_index, + file_bytes, + total_file_len, + ) + .await } #[ic_cdk_macros::update(guard = guard_against_non_controllers)] -pub fn clear_file_and_info(path: String) { - reset_for_new_upload(&path, 0).unwrap() -} - -fn reset_for_new_upload(path: &str, timestamp: Timestamp) -> std::io::Result<()> { - delete_if_exists(path)?; - initialize_file_info(path, timestamp); - Ok(()) -} - -fn initialize_file_info(path: &str, timestamp: Timestamp) { - FILE_INFO.with(|file_info| { - let mut file_info_mut = file_info.borrow_mut(); - - file_info_mut.insert(path.to_string(), (timestamp, 0, None, 0)); - }); -} - -fn delete_if_exists(path: &str) -> std::io::Result<()> { - if std::fs::metadata(path).is_ok() { - std::fs::remove_file(path)?; - } - Ok(()) -} - -pub fn hash_file(path: String) { - clear_file_hash(&path); - hash_file_by_parts(&path, 0) -} - -#[ic_cdk_macros::query(guard = guard_against_non_controllers)] -pub fn get_file_hash(path: String) -> Option { - Some( - load_hashes() - .unwrap() - .get(&path)? - .iter() - .map(|bytes| format!("{:02x}", bytes)) - .collect(), - ) +pub fn _azle_clear_file_and_info(path: String) { + upload_file::reset_for_new_upload(&path, 0).unwrap() } #[ic_cdk_macros::query(guard = guard_against_non_controllers)] -pub fn get_hash_status(path: String) -> Option<(u64, u64)> { - Some((get_bytes_hashed(&path), get_file_size(&path)?)) -} - -fn hash_file_by_parts(path: &str, position: u64) { - let file_length = std::fs::metadata(path).unwrap().len(); - let percentage = position / file_length.max(1) * 100; - ic_cdk::println!( - "Hashing: {} | {}/{} : {:.2}%", - path, - bytes_to_human_readable(position), - bytes_to_human_readable(file_length), - percentage - ); - let mut file = std::fs::File::open(&path).unwrap(); - - std::io::Seek::seek(&mut file, std::io::SeekFrom::Start(position)).unwrap(); - - // Read the bytes - let limit = 75 * 1024 * 1024; // This limit will be determine by how much hashing an update method can do without running out of cycles. It runs out somewhere between 75 and 80 - // This limit must be the same as on the node side or else the hashes will not match - let mut buffer = vec![0; limit]; - let bytes_read = std::io::Read::read(&mut file, &mut buffer).unwrap(); - - let previous_hash = get_partial_file_hash(path); - - if bytes_read != 0 { - let new_hash = hash_chunk_with(&buffer, previous_hash.as_ref()); - set_partial_hash(path, new_hash); - set_bytes_hashed(path, position + bytes_read as u64); - spawn_hash_by_parts(path.to_string(), position + bytes_read as u64) - } else { - // No more bytes to hash, set as final hash for this file - let final_hash = match previous_hash { - Some(hash) => hash, - None => { - let empty_file_hash = hash_chunk_with(&[], None); - empty_file_hash - } - }; - set_file_hash(path, final_hash); - clear_file_info(path); - } -} - -fn spawn_hash_by_parts(path: String, position: u64) { - let delay = core::time::Duration::new(0, 0); - let closure = move || hash_file_by_parts(&path, position); - ic_cdk_timers::set_timer(delay, closure); -} - -pub fn get_partial_file_hash(path: &str) -> Option> { - FILE_INFO - .with(|file_info| Some(file_info.borrow().get(path)?.2.clone())) - .flatten() -} - -fn set_partial_hash(path: &str, hash: Vec) { - FILE_INFO.with(|file_hashes| { - if let Some(entry) = file_hashes.borrow_mut().get_mut(path) { - entry.2 = Some(hash); - } else { - panic!("Couldn't find file info for {}", path) - } - }); -} - -fn clear_file_info(path: &str) { - FILE_INFO.with(|file_info| file_info.borrow_mut().remove(path)); -} - -fn clear_file_hash(path: &str) { - let mut file_hashes = load_hashes().unwrap(); - file_hashes.remove(path); - save_hashes(&file_hashes).unwrap(); -} - -fn set_file_hash(path: &str, hash: Vec) { - let mut file_hashes = load_hashes().unwrap(); - file_hashes.insert(path.to_string(), hash); - save_hashes(&file_hashes).unwrap(); -} - -fn get_bytes_hashed(path: &str) -> u64 { - FILE_INFO.with(|file_info| { - let file_info = file_info.borrow(); - match file_info.get(path) { - Some(file_info) => file_info.clone().3, - None => get_file_size(path).unwrap(), - } - }) -} - -fn set_bytes_hashed(path: &str, bytes_hashed: u64) { - FILE_INFO.with(|file_info| { - let mut file_info_mut = file_info.borrow_mut(); - if let Some(file_info_entry) = file_info_mut.get_mut(path) { - file_info_entry.3 = bytes_hashed; - } - }); -} - -fn get_file_size(path: &str) -> Option { - match std::fs::metadata(path) { - Ok(metadata) => Some(metadata.len()), - Err(_) => None, - } -} - -pub fn get_file_hash_path() -> std::path::PathBuf { - std::path::Path::new(".config") - .join("azle") - .join("file_hashes.json") -} - -fn hash_chunk_with(data: &[u8], previous_hash: Option<&Vec>) -> Vec { - let mut h: sha2::Sha256 = sha2::Digest::new(); - sha2::Digest::update(&mut h, data); - if let Some(hash) = previous_hash { - sha2::Digest::update(&mut h, hash); - } - sha2::Digest::finalize(h).to_vec() -} - -fn load_hashes() -> Result>, std::io::Error> { - let file_hash_path = get_file_hash_path(); - if !file_hash_path.exists() { - // If File doesn't exist yet return empty hash map - return Ok(HashMap::new()); - } - let buffer = std::fs::read(file_hash_path)?; - - Ok(if buffer.is_empty() { - // If File is empty return empty hash map - HashMap::new() - } else { - serde_json::from_slice(&buffer)? - }) -} - -fn save_hashes(file_hashes: &HashMap>) -> Result<(), std::io::Error> { - let data = serde_json::to_vec(file_hashes)?; - - std::fs::write(get_file_hash_path(), data) +pub fn _azle_get_file_hash(path: String) -> Option { + upload_file::get_file_hash(path) } diff --git a/src/compiler/rust/canister/src/upload_file/bytes_to_human_readable.rs b/src/compiler/rust/canister/src/upload_file/bytes_to_human_readable.rs new file mode 100644 index 0000000000..91d90a6c6c --- /dev/null +++ b/src/compiler/rust/canister/src/upload_file/bytes_to_human_readable.rs @@ -0,0 +1,20 @@ +pub fn bytes_to_human_readable(size_in_bytes: u64) -> String { + let suffixes = ["B", "KiB", "MiB", "GiB"]; + let size = size_in_bytes as f64; + + let result = suffixes.iter().fold( + (size, suffixes[0], false), + |(remaining_size, selected_suffix, done), suffix| { + if done { + return (remaining_size, selected_suffix, done); + } + if remaining_size < 1024.0 { + (remaining_size, suffix, true) + } else { + (remaining_size / 1024.0, suffix, false) + } + }, + ); + + format!("{:.2} {}", result.0, result.1) +} diff --git a/src/compiler/rust/canister/src/upload_file/file_info.rs b/src/compiler/rust/canister/src/upload_file/file_info.rs new file mode 100644 index 0000000000..9157b197c4 --- /dev/null +++ b/src/compiler/rust/canister/src/upload_file/file_info.rs @@ -0,0 +1,62 @@ +use crate::upload_file::Timestamp; +use crate::FILE_INFO; +use std::{cell::RefCell, collections::BTreeMap}; + +pub fn initialize_file_info(path: &str, timestamp: Timestamp) { + FILE_INFO.with(|file_info| { + let mut file_info_mut = file_info.borrow_mut(); + + file_info_mut.insert(path.to_string(), (timestamp, 0, None, 0)); + }); +} + +pub fn get_partial_file_hash(path: &str) -> Option> { + FILE_INFO + .with(|file_info| Some(file_info.borrow().get(path)?.2.clone())) + .flatten() +} + +pub fn set_partial_hash(path: &str, hash: Vec) { + FILE_INFO.with(|file_hashes| { + if let Some(entry) = file_hashes.borrow_mut().get_mut(path) { + entry.2 = Some(hash); + } else { + panic!("Couldn't find file info for {}", path) + } + }); +} + +pub fn clear_file_info(path: &str) { + FILE_INFO.with(|file_info| file_info.borrow_mut().remove(path)); +} + +pub fn set_bytes_hashed(path: &str, bytes_hashed: u64) { + FILE_INFO.with(|file_info| { + let mut file_info_mut = file_info.borrow_mut(); + if let Some(file_info_entry) = file_info_mut.get_mut(path) { + file_info_entry.3 = bytes_hashed; + } + }); +} + +pub fn set_bytes_received(dest_path: &str, bytes_in_chunk: usize) -> u64 { + FILE_INFO.with(|total_bytes_received| { + let mut total_bytes_received_mut = total_bytes_received.borrow_mut(); + match total_bytes_received_mut.get_mut(dest_path) { + Some((_, total_bytes, _, _)) => { + *total_bytes += bytes_in_chunk as u64; + *total_bytes + } + None => panic!("Couldn't find file info for {}", dest_path), + } + }) +} + +pub fn get_timestamp(path: &str) -> Timestamp { + FILE_INFO.with(|uploaded_file_timestamps_map| { + match uploaded_file_timestamps_map.borrow().get(path) { + Some((timestamp, _, _, _)) => timestamp.clone(), + None => 0, + } + }) +} diff --git a/src/compiler/rust/canister/src/upload_file/hash.rs b/src/compiler/rust/canister/src/upload_file/hash.rs new file mode 100644 index 0000000000..3d1ac6aac0 --- /dev/null +++ b/src/compiler/rust/canister/src/upload_file/hash.rs @@ -0,0 +1,111 @@ +use std::collections::HashMap; + +use crate::chunk; +use crate::upload_file::bytes_to_human_readable; +use crate::upload_file::file_info; + +pub fn init_hashes() -> Result<(), std::io::Error> { + save_hashes(&HashMap::new()) +} + +pub fn get_file_hash(path: String) -> Option { + Some( + load_hashes() + .unwrap() + .get(&path)? + .iter() + .map(|bytes| format!("{:02x}", bytes)) + .collect(), + ) +} + +pub async fn hash_file(path: &str) { + clear_file_hash(path); + + let file_length = std::fs::metadata(path).unwrap().len(); + let limit = 75 * 1024 * 1024; // This limit will be determine by how much hashing an update method can do without running out of cycles. It runs out somewhere between 75 and 80 + // This limit must be the same as on the node side or else the hashes will not match + let mut file = std::fs::File::open(path).unwrap(); + + let mut position = 0; + + loop { + let percentage = position / file_length.max(1) * 100; + ic_cdk::println!( + "Hashing: {} | {}/{} : {:.2}%", + path, + bytes_to_human_readable::bytes_to_human_readable(position), + bytes_to_human_readable::bytes_to_human_readable(file_length), + percentage + ); + + std::io::Seek::seek(&mut file, std::io::SeekFrom::Start(position)).unwrap(); + + let mut buffer = vec![0; limit]; + let bytes_read = std::io::Read::read(&mut file, &mut buffer).unwrap(); + + let previous_hash = file_info::get_partial_file_hash(path); + + if bytes_read != 0 { + let new_hash = hash_chunk_with(&buffer, previous_hash.as_ref()); + file_info::set_partial_hash(path, new_hash); + file_info::set_bytes_hashed(path, position + bytes_read as u64); + position += bytes_read as u64; + chunk::chunk().await; + continue; + } else { + // No more bytes to hash, set as final hash for this file + let final_hash = match previous_hash { + Some(hash) => hash, + None => { + let empty_file_hash = hash_chunk_with(&[], None); + empty_file_hash + } + }; + chunk::chunk().await; + set_file_hash(path, final_hash); + file_info::clear_file_info(path); + break; + } + } +} + +fn clear_file_hash(path: &str) { + let mut file_hashes = load_hashes().unwrap(); + file_hashes.remove(path); + save_hashes(&file_hashes).unwrap(); +} + +fn set_file_hash(path: &str, hash: Vec) { + let mut file_hashes = load_hashes().unwrap(); + file_hashes.insert(path.to_string(), hash); + save_hashes(&file_hashes).unwrap(); +} + +fn get_file_hash_path() -> std::path::PathBuf { + std::path::Path::new(".config") + .join("azle") + .join("file_hashes.json") +} + +fn hash_chunk_with(data: &[u8], previous_hash: Option<&Vec>) -> Vec { + let mut h: sha2::Sha256 = sha2::Digest::new(); + sha2::Digest::update(&mut h, data); + if let Some(hash) = previous_hash { + sha2::Digest::update(&mut h, hash); + } + sha2::Digest::finalize(h).to_vec() +} + +fn load_hashes() -> Result>, std::io::Error> { + let file_hash_path = get_file_hash_path(); + let buffer = std::fs::read(file_hash_path)?; + + Ok(serde_json::from_slice(&buffer)?) +} + +fn save_hashes(file_hashes: &HashMap>) -> Result<(), std::io::Error> { + let data = serde_json::to_vec(file_hashes)?; + + std::fs::write(get_file_hash_path(), data) +} diff --git a/src/compiler/rust/canister/src/upload_file/mod.rs b/src/compiler/rust/canister/src/upload_file/mod.rs new file mode 100644 index 0000000000..b3ef987bb7 --- /dev/null +++ b/src/compiler/rust/canister/src/upload_file/mod.rs @@ -0,0 +1,13 @@ +use wasmedge_quickjs::AsObject; + +mod bytes_to_human_readable; +mod file_info; +mod hash; +mod reset; +mod upload_file_chunk; + +pub use hash::{get_file_hash, init_hashes}; +pub use reset::reset_for_new_upload; +pub use upload_file_chunk::upload_file_chunk; + +pub type Timestamp = u64; diff --git a/src/compiler/rust/canister/src/upload_file/reset.rs b/src/compiler/rust/canister/src/upload_file/reset.rs new file mode 100644 index 0000000000..ed82181a5d --- /dev/null +++ b/src/compiler/rust/canister/src/upload_file/reset.rs @@ -0,0 +1,15 @@ +use crate::upload_file::file_info; +use crate::upload_file::Timestamp; + +pub fn reset_for_new_upload(path: &str, timestamp: Timestamp) -> std::io::Result<()> { + delete_if_exists(path)?; + file_info::initialize_file_info(path, timestamp); + Ok(()) +} + +fn delete_if_exists(path: &str) -> std::io::Result<()> { + if std::fs::metadata(path).is_ok() { + std::fs::remove_file(path)?; + } + Ok(()) +} diff --git a/src/compiler/rust/canister/src/upload_file/upload_file_chunk.rs b/src/compiler/rust/canister/src/upload_file/upload_file_chunk.rs new file mode 100644 index 0000000000..34ca1b30ea --- /dev/null +++ b/src/compiler/rust/canister/src/upload_file/upload_file_chunk.rs @@ -0,0 +1,88 @@ +use crate::chunk; +use crate::upload_file::bytes_to_human_readable; +use crate::upload_file::file_info; +use crate::upload_file::hash; +use crate::upload_file::reset; +use crate::upload_file::Timestamp; + +pub async fn upload_file_chunk( + dest_path: String, + timestamp: u64, + start_index: u64, + file_bytes: Vec, + total_file_len: u64, +) { + let is_latest_version = check_if_latest_version(&dest_path, timestamp); + + if !is_latest_version { + return; + } + + let uploaded_file_len = + write_chunk(&dest_path, file_bytes, start_index, total_file_len).unwrap(); + + let percentage_complete = uploaded_file_len as f64 / total_file_len.max(1) as f64 * 100.0; + ic_cdk::println!( + "Received chunk: {} | {}/{} : {:.2}%", + dest_path, + bytes_to_human_readable::bytes_to_human_readable(uploaded_file_len), + bytes_to_human_readable::bytes_to_human_readable(total_file_len), + percentage_complete + ); + + if uploaded_file_len == total_file_len { + chunk::chunk().await; + hash::hash_file(&dest_path).await; + } +} + +fn check_if_latest_version(dest_path: &str, current_timestamp: Timestamp) -> bool { + let last_recorded_timestamp = file_info::get_timestamp(dest_path); + + if current_timestamp < last_recorded_timestamp { + // The request is from an earlier upload attempt. Disregard + return false; + } + + if current_timestamp > last_recorded_timestamp { + // The request is from a newer upload attempt. Clean up the previous attempt. + reset::reset_for_new_upload(dest_path, current_timestamp).unwrap(); + } + + true +} + +// Adds the given file_bytes to the chunked file at the chunk number position. +// Returns the new total length of chunked file after the addition +fn write_chunk( + path: &str, + file_bytes: Vec, + start_index: u64, + file_len: u64, +) -> std::io::Result { + match std::path::Path::new(path).parent() { + Some(dir_path) => std::fs::create_dir_all(dir_path)?, + None => (), //Dir doesn't need to be created + }; + + let mut file: std::fs::File = match std::fs::OpenOptions::new().write(true).open(path) { + Ok(file) => file, + Err(_) => init_file(path, file_len)?, + }; + + std::io::Seek::seek(&mut file, std::io::SeekFrom::Start(start_index))?; + std::io::Write::write_all(&mut file, &file_bytes)?; + drop(file); + + Ok(file_info::set_bytes_received(path, file_bytes.len())) +} + +fn init_file(path: &str, file_len: u64) -> std::io::Result { + let new_file = std::fs::OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .open(&path)?; + new_file.set_len(file_len)?; + Ok(new_file) +} diff --git a/src/compiler/rust/open_value_sharing/Cargo.toml b/src/compiler/rust/open_value_sharing/Cargo.toml index 52fefac0f2..c36e9ed955 100644 --- a/src/compiler/rust/open_value_sharing/Cargo.toml +++ b/src/compiler/rust/open_value_sharing/Cargo.toml @@ -10,7 +10,7 @@ candid = "0.10.8" ic-cdk = "0.12.1" ic-cdk-macros = "0.8.4" ic-cdk-timers = "0.6.0" -ic-stable-structures = "0.6.2" +ic-stable-structures = "0.6.5" anyhow = "1.0.86" [dev-dependencies] diff --git a/static_canister_template.wasm b/static_canister_template.wasm index d262d448d7..9066a61877 100644 Binary files a/static_canister_template.wasm and b/static_canister_template.wasm differ diff --git a/tests/end_to_end/http_server/large_files/dfx.json b/tests/end_to_end/http_server/large_files/dfx.json index f0e6bb0378..6c454581d4 100644 --- a/tests/end_to_end/http_server/large_files/dfx.json +++ b/tests/end_to_end/http_server/large_files/dfx.json @@ -2,7 +2,7 @@ "canisters": { "backend": { "type": "azle", - "main": "src/backend/index.ts", + "main": "src/index.ts", "assets": [ ["assets/auto", "assets"], ["assets/permanent", "assets"], diff --git a/tests/end_to_end/http_server/large_files/package-lock.json b/tests/end_to_end/http_server/large_files/package-lock.json index 7c8618fb3d..890d30fd7a 100644 --- a/tests/end_to_end/http_server/large_files/package-lock.json +++ b/tests/end_to_end/http_server/large_files/package-lock.json @@ -4,10 +4,12 @@ "requires": true, "packages": { "": { + "hasInstallScript": true, "dependencies": { "azle": "0.23.0", "express": "^4.18.2", - "uuid": "^9.0.1" + "mime-type": "^4.0.0", + "uuid": "^10.0.0" }, "devDependencies": { "@types/express": "^4.17.21", @@ -2326,6 +2328,18 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/azle/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", @@ -4095,6 +4109,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "node_modules/inherits-ex": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/inherits-ex/-/inherits-ex-1.6.0.tgz", + "integrity": "sha512-67sANrSoIvMmYDy0qyjmM/PvFdgBmWZVQoPBsRpDuP4tmlylEX1KdGN1bHvReG3eHBdaHY7WlZsrqys4y/cLVA==" + }, "node_modules/intl": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/intl/-/intl-1.2.5.tgz", @@ -5105,7 +5124,6 @@ "version": "4.0.7", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", - "dev": true, "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -5150,6 +5168,19 @@ "node": ">= 0.6" } }, + "node_modules/mime-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mime-type/-/mime-type-4.0.0.tgz", + "integrity": "sha512-1FCc9fsTg44pd7koB486WEepve+sc4847F0USUf08j4+bAU6/9ckIq4kHVEhCxbxHCyUZy++dxx/PtSR/m4XBQ==", + "dependencies": { + "micromatch": "^4.0.2", + "path.js": "^1.0.7", + "util-ex": "^0.3.15" + }, + "engines": { + "node": ">= 8.6" + } + }, "node_modules/mime-types": { "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", @@ -5478,6 +5509,24 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, + "node_modules/path.js": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path.js/-/path.js-1.0.7.tgz", + "integrity": "sha512-DPX1vNSmckC3figW8xT/fEuF+XBg/96RUpXfW0yT6UGHgQI2mtTSADflz45bXKREbz+5GJa0qDQGNIpNr1skRQ==", + "dependencies": { + "escape-string-regexp": "^1.0.3", + "inherits-ex": "^1.1.2", + "util-ex": "^0.3.10" + } + }, + "node_modules/path.js/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/pbkdf2": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", @@ -6849,6 +6898,15 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "node_modules/util-ex": { + "version": "0.3.18", + "resolved": "https://registry.npmjs.org/util-ex/-/util-ex-0.3.18.tgz", + "integrity": "sha512-GPVjD257DtgCDMHYqbdWvZ+RY3HaXZ7Dps/44de5WscOjFNL2Qr+6dTIKGlyfA4A5BXyeFKWy8mb19OATWhh8Q==", + "dependencies": { + "inherits-ex": "^1.5.2", + "xtend": "^4.0.2" + } + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -6858,9 +6916,9 @@ } }, "node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", "funding": [ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" @@ -7013,6 +7071,14 @@ } } }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", @@ -8726,6 +8792,11 @@ "requires": { "brace-expansion": "^2.0.1" } + }, + "uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==" } } }, @@ -10085,6 +10156,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "inherits-ex": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/inherits-ex/-/inherits-ex-1.6.0.tgz", + "integrity": "sha512-67sANrSoIvMmYDy0qyjmM/PvFdgBmWZVQoPBsRpDuP4tmlylEX1KdGN1bHvReG3eHBdaHY7WlZsrqys4y/cLVA==" + }, "intl": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/intl/-/intl-1.2.5.tgz", @@ -10857,7 +10933,6 @@ "version": "4.0.7", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", - "dev": true, "requires": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -10889,6 +10964,16 @@ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, + "mime-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mime-type/-/mime-type-4.0.0.tgz", + "integrity": "sha512-1FCc9fsTg44pd7koB486WEepve+sc4847F0USUf08j4+bAU6/9ckIq4kHVEhCxbxHCyUZy++dxx/PtSR/m4XBQ==", + "requires": { + "micromatch": "^4.0.2", + "path.js": "^1.0.7", + "util-ex": "^0.3.15" + } + }, "mime-types": { "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", @@ -11138,6 +11223,23 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, + "path.js": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path.js/-/path.js-1.0.7.tgz", + "integrity": "sha512-DPX1vNSmckC3figW8xT/fEuF+XBg/96RUpXfW0yT6UGHgQI2mtTSADflz45bXKREbz+5GJa0qDQGNIpNr1skRQ==", + "requires": { + "escape-string-regexp": "^1.0.3", + "inherits-ex": "^1.1.2", + "util-ex": "^0.3.10" + }, + "dependencies": { + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + } + } + }, "pbkdf2": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", @@ -11996,15 +12098,24 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "util-ex": { + "version": "0.3.18", + "resolved": "https://registry.npmjs.org/util-ex/-/util-ex-0.3.18.tgz", + "integrity": "sha512-GPVjD257DtgCDMHYqbdWvZ+RY3HaXZ7Dps/44de5WscOjFNL2Qr+6dTIKGlyfA4A5BXyeFKWy8mb19OATWhh8Q==", + "requires": { + "inherits-ex": "^1.5.2", + "xtend": "^4.0.2" + } + }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, "uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==" + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==" }, "v8-compile-cache-lib": { "version": "3.0.1", @@ -12107,6 +12218,11 @@ "integrity": "sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg==", "requires": {} }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, "y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/tests/end_to_end/http_server/large_files/package.json b/tests/end_to_end/http_server/large_files/package.json index 602e9f89c1..5bf6eb6ae3 100644 --- a/tests/end_to_end/http_server/large_files/package.json +++ b/tests/end_to_end/http_server/large_files/package.json @@ -1,12 +1,14 @@ { "scripts": { + "postinstall": "mkdir -p assets/auto", "pretest": "tsx test/pretest.ts", "test": "jest" }, "dependencies": { "azle": "0.23.0", "express": "^4.18.2", - "uuid": "^9.0.1" + "mime-type": "^4.0.0", + "uuid": "^10.0.0" }, "devDependencies": { "@types/express": "^4.17.21", diff --git a/tests/end_to_end/http_server/large_files/src/backend/index.ts b/tests/end_to_end/http_server/large_files/src/backend/index.ts deleted file mode 100644 index 57fb56b44d..0000000000 --- a/tests/end_to_end/http_server/large_files/src/backend/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { Server } from 'azle/experimental'; -import express, { Request } from 'express'; -import { existsSync, statSync } from 'fs'; - -export default Server(() => { - const app = express(); - - app.get('/exists', (req: Request, res) => { - res.send(existsSync(req.query.path)); - }); - - app.get('/size', (req: Request, res) => { - res.json(getFileSize(req.query.path)); - }); - - app.use(express.static('/assets')); - - return app.listen(); -}); - -function getFileSize(path: string): number { - try { - const stats = statSync(path); - if (stats.isFile()) { - return stats.size; - } else { - return 0; - } - } catch (err) { - return 0; - } -} diff --git a/tests/end_to_end/http_server/large_files/src/index.ts b/tests/end_to_end/http_server/large_files/src/index.ts new file mode 100644 index 0000000000..ddcc7cee56 --- /dev/null +++ b/tests/end_to_end/http_server/large_files/src/index.ts @@ -0,0 +1,67 @@ +import express, { Request } from 'express'; +import { access, readFile, stat } from 'fs/promises'; +import mime from 'mime'; +import { extname } from 'path'; + +import { ls } from './list_files'; + +const app = express(); +app.use(express.json()); + +app.get( + '/', + async ( + req: Request, + res + ) => { + res.send(`
${await ls(req.query.path, req.query.recursive)}
`); + } +); + +app.get( + '/exists', + async (req: Request, res) => { + try { + await access(req.query.path); + res.send(true); + } catch { + res.send(false); + } + } +); + +app.get('/size', async (req: Request, res) => { + const size = await getFileSize(req.query.path); + res.json(size); +}); + +app.get( + '/read-file', + async (req: Request, res) => { + const filePath = req.query.path; + const fileExt = extname(filePath); + const mimeType = mime.lookup(fileExt) || 'application/octet-stream'; + + const content = await readFile(filePath); + + res.setHeader('Content-Type', mimeType); + res.send(content); + } +); + +app.use(express.static('/assets')); + +app.listen(); + +async function getFileSize(filePath: string): Promise { + try { + const stats = await stat(filePath); + if (stats.isFile()) { + return stats.size; + } else { + return 0; + } + } catch { + return 0; + } +} diff --git a/tests/end_to_end/http_server/large_files/src/list_files.ts b/tests/end_to_end/http_server/large_files/src/list_files.ts new file mode 100644 index 0000000000..07c22287c8 --- /dev/null +++ b/tests/end_to_end/http_server/large_files/src/list_files.ts @@ -0,0 +1,105 @@ +import { id } from 'azle'; +import { readdir, stat } from 'fs/promises'; +import { basename, join } from 'path'; + +export async function ls( + path: string = '.', + recursive: boolean = false +): Promise { + const stats = await stat(path); + + if (!stats.isDirectory()) { + return `
    ${await createHtmlListItem(path, false)}
\n`; + } + + let result = '
    '; + const items = await sortDirContents(path); + for (const item of items) { + const fullPath = join(path, item); + const isDirectory = (await stat(fullPath)).isDirectory(); + result += await createHtmlListItem(fullPath, isDirectory); + const stats = await stat(fullPath); + if (recursive && stats.isDirectory()) { + result += `${await ls(fullPath, recursive)}`; + } + } + + result += '
'; + return result; +} + +async function createHtmlListItem( + path: string, + isDirectory: boolean +): Promise { + const link = isDirectory + ? `/?path=${encodeURIComponent(path)}` + : `/read-file?path=${encodeURIComponent(path)}`; + const title = await getFileDetails(path); + return `
  • ${basename(path)}
  • `; +} + +async function getFileDetails(filePath: string): Promise { + // Get file statistics + const stats = await stat(filePath); + + // Get file permissions in symbolic notation + const permissions = (stats.mode & 0o777).toString(8); + const symbolicPermissions = permissions.replace(/./g, (d: string) => { + const lookup: { [key: string]: string } = { + '0': '---', + '1': '--x', + '2': '-w-', + '3': '-wx', + '4': 'r--', + '5': 'r-x', + '6': 'rw-', + '7': 'rwx' + }; + return lookup[d] || '-'; + }); + const dir = stats.isDirectory() ? 'd' : '-'; + const fullPermissions = `${dir}${symbolicPermissions}`; + + // Get the number of hard links + const hardLinks = stats.nlink; + + // Get owner and group names using 'id' command + const owner = id(); + + // Get file size + const size = stats.size.toString(); + + // Get last modification time + const modTime = new Date(stats.mtime); + const month = modTime.toLocaleString('default', { month: 'short' }); + const day = modTime.getDate(); + const time = `${modTime.getHours()}:${modTime.getMinutes()}`; + + // Format the output + return `${fullPermissions} ${hardLinks} ${owner} ${size} ${month} ${day} ${time} ${basename( + filePath + )}`; +} + +async function sortDirContents(currentPath: string): Promise { + const items = await readdir(currentPath); + + const directories: string[] = []; + const files: string[] = []; + + for (const item of items) { + const fullPath = join(currentPath, item); + const stats = await stat(fullPath); + if (stats.isDirectory()) { + directories.push(item); + } else { + files.push(item); + } + } + + directories.sort((a, b) => a.localeCompare(b)); + files.sort((a, b) => a.localeCompare(b)); + + return [...directories, ...files]; +} diff --git a/tests/end_to_end/http_server/large_files/test/authorization_tests.ts b/tests/end_to_end/http_server/large_files/test/authorization_tests.ts index 7d023b9929..0d2d57e430 100644 --- a/tests/end_to_end/http_server/large_files/test/authorization_tests.ts +++ b/tests/end_to_end/http_server/large_files/test/authorization_tests.ts @@ -19,7 +19,7 @@ export function getAuthorizationTests(): Test { ); await expect( - actor.upload_file_chunk( + actor._azle_upload_file_chunk( destPath, 0n, 0n, @@ -31,35 +31,25 @@ export function getAuthorizationTests(): Test { ); }); - it('fails to get hash status from an unauthorized actor', async () => { + it('fails to get hash from an unauthorized actor', async () => { const actor = await createActor( getCanisterId('backend'), unauthorizedUser ); await expect( - actor.get_hash_status('assets/test0B') + actor._azle_get_file_hash('assets/test0B') ).rejects.toThrow( /Not Authorized: only controllers of this canister may call this method/ ); }); - it('fails to get hash from an unauthorized actor', async () => { - const actor = await createActor( - getCanisterId('backend'), - unauthorizedUser - ); - await expect(actor.get_file_hash('assets/test0B')).rejects.toThrow( - /Not Authorized: only controllers of this canister may call this method/ - ); - }); - it('fails to clear file and info from an unauthorized actor', async () => { const actor = await createActor( getCanisterId('backend'), unauthorizedUser ); await expect( - actor.clear_file_and_info('assets/test0B') + actor._azle_clear_file_and_info('assets/test0B') ).rejects.toThrow( /Not Authorized: only controllers of this canister may call this method/ ); diff --git a/tests/end_to_end/http_server/large_files/test/huge_file_tests.ts b/tests/end_to_end/http_server/large_files/test/huge_file_tests.ts index f7c883fe65..47be7ecfa1 100644 --- a/tests/end_to_end/http_server/large_files/test/huge_file_tests.ts +++ b/tests/end_to_end/http_server/large_files/test/huge_file_tests.ts @@ -1,27 +1,21 @@ import { describe } from '@jest/globals'; import { please, Test } from 'azle/test'; import { execSync } from 'child_process'; -import { rm } from 'fs-extra'; import { join } from 'path'; import { Unit } from '../../../../../scripts/file_generator'; import { generateTestFileOfSize } from './generate_test_files'; import { getAutoGeneratedFileName, verifyUpload } from './tests'; -const hugeAutoGenAutoUploadFileInfos: [number, Unit][] = [[2, 'GiB']]; +const hugeAutoGenAutoUploadSmallFileInfos: [number, Unit][] = [[0, 'GiB']]; // The tests will fail if this array is empty, so for AZLE_QUICK_TEST we will have a dummy entry + +const hugeAutoGenAutoUploadFileInfos: [number, Unit][] = + process.env.AZLE_QUICK_TEST === 'true' + ? hugeAutoGenAutoUploadSmallFileInfos + : [...hugeAutoGenAutoUploadSmallFileInfos, [2, 'GiB'], [5, 'GiB']]; export function hugeFilesTests(origin: string): Test { return () => { - please( - 'remove all other auto generated files so there is room for huge files', - async () => { - await rm(join('assets', 'auto'), { - recursive: true, - force: true - }); - } - ); - describe.each(hugeAutoGenAutoUploadFileInfos)( 'generate huge files', (size, units) => { @@ -31,25 +25,28 @@ export function hugeFilesTests(origin: string): Test { async () => { await generateTestFileOfSize(size, units); }, - 10 * 60 * 1_000 + // TODO fix these numbers when we know them better + (10 + 20 + 40) * 60 * 1_000 ); } ); please( - 'redeploy the canister to remove files and reupload', + 'redeploy the canister to reupload', async () => { execSync(`dfx deploy --upgrade-unchanged`, { stdio: 'inherit' }); - } + }, + (40 + 40) * 60 * 1_000 + // TODO fix these numbers when we know them better ); describe.each(hugeAutoGenAutoUploadFileInfos)( 'verify huge files were uploaded correctly', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); - verifyUpload(origin, join('auto', fileName), fileName); + verifyUpload(origin, join('auto', fileName), fileName, 500_000); } ); }; diff --git a/tests/end_to_end/http_server/large_files/test/tests.ts b/tests/end_to_end/http_server/large_files/test/tests.ts index 4316d890b1..dfbe924d9d 100644 --- a/tests/end_to_end/http_server/large_files/test/tests.ts +++ b/tests/end_to_end/http_server/large_files/test/tests.ts @@ -4,7 +4,7 @@ import { hashFile } from 'azle/scripts/hash_file'; import { createActor } from 'azle/src/compiler/file_uploader/uploader_actor'; import { expect, it, please, Test } from 'azle/test'; import { execSync } from 'child_process'; -import { rm } from 'fs/promises'; +import { readdir, rm } from 'fs/promises'; import { join } from 'path'; import { Unit } from '../../../../../scripts/file_generator'; @@ -21,19 +21,11 @@ export function getTests(canisterId: string): Test { return () => { beforeAll(async () => { // Ensure all files from previous runs are cleared out - await rm(join('assets', 'auto'), { recursive: true, force: true }); - await rm(join('assets', 'manual'), { - recursive: true, - force: true - }); + await deleteAutoGeneratedFiles(); }); afterAll(async () => { // Clear out files from this run - await rm(join('assets', 'auto'), { recursive: true, force: true }); - await rm(join('assets', 'manual'), { - recursive: true, - force: true - }); + await deleteAutoGeneratedFiles(); }); describe('generate files', generateFiles()); @@ -70,17 +62,34 @@ export function getTests(canisterId: string): Test { 15 * 60 * 1_000 ); - // TODO right now the test can not tell if the files are there because they were uploaded again or if they are there because they were in stable memory. It would be good to develop a test to determine that. describe( 'verify files specified in dfx.json exist after redeploy', getDfxConfigFileTests(origin) ); + please( + 'redeploy with no upload', + async () => { + execSync( + `AZLE_DISABLE_AUTO_FILE_UPLOAD=true dfx deploy --upgrade-unchanged`, + { + stdio: 'inherit' + } + ); + }, + 1 * 60 * 1_000 + ); + + describe( + 'verify files specified in dfx.json exist after redeploy even with file uploading disabled', + getDfxConfigFileTests(origin) + ); + describe('manual upload tests', manualTests(origin)); // Run the huge file tests only once at the end so they don't slow down the rest of the test process // TODO CI CD isn't working with the 2GiB or bigger tests so we're just going to have this one for local tests. - describe.skip('huge files tests', hugeFilesTests(origin)); + describe('huge files tests', hugeFilesTests(origin)); }; } @@ -102,25 +111,45 @@ export function getAutoGeneratedFileName(size: number, units: Unit): string { export function verifyUpload( origin: string, srcPath: string, - destPath: string + destPath: string, + timeout?: number ): void { - it(`uploads and hashes ${srcPath}`, async () => { - const localPath = join('assets', srcPath); - const canisterPath = join('assets', destPath); - - const expectedHash = (await hashFile(localPath)).toString('hex'); - - const response = await fetch(`${origin}/exists?path=${canisterPath}`); - const exists = await response.json(); - - expect(exists).toBe(true); - - const actor = await createActor( - getCanisterId('backend'), - AZLE_UPLOADER_IDENTITY_NAME - ); - const hash = await actor.get_file_hash(canisterPath); + it( + `uploads and hashes ${srcPath}`, + async () => { + const localPath = join('assets', srcPath); + const canisterPath = join('assets', destPath); + + const expectedHash = (await hashFile(localPath)).toString('hex'); + + const response = await fetch( + `${origin}/exists?path=${canisterPath}` + ); + const exists = await response.json(); + + expect(exists).toBe(true); + + const actor = await createActor( + getCanisterId('backend'), + AZLE_UPLOADER_IDENTITY_NAME + ); + const hash = await actor._azle_get_file_hash(canisterPath); + + expect(hash).toStrictEqual([expectedHash]); + }, + timeout + ); +} - expect(hash).toStrictEqual([expectedHash]); +async function deleteAutoGeneratedFiles(): Promise { + const autoDir = join('assets', 'auto'); + const files = await readdir(autoDir); + // Since "assets/auto" is in the dfx.json we don't want to delete the folder itself or else the canister will fail to build + for (const item of files) { + await rm(join(autoDir, item), { recursive: true, force: true }); + } + await rm(join('assets', 'manual'), { + recursive: true, + force: true }); }