diff --git a/examples/large_files/test/auto_tests.ts b/examples/large_files/test/auto_tests.ts index d424ef19a7..b97a1a49ff 100644 --- a/examples/large_files/test/auto_tests.ts +++ b/examples/large_files/test/auto_tests.ts @@ -1,11 +1,12 @@ import { describe } from '@jest/globals'; import { please, Test } from 'azle/test/jest'; +import { join } from 'path'; import { Unit } from '../../../scripts/file_generator'; import { generateTestFileOfSize } from './generate_test_files'; import { getAutoGeneratedFileName, verifyUpload } from './tests'; -const autoGenAutoUploadTests: [number, Unit][] = [ +const autoGenAutoUploadFileInfo: [number, Unit][] = [ // Edge Cases [0, 'B'], [1, 'B'], @@ -13,7 +14,6 @@ const autoGenAutoUploadTests: [number, Unit][] = [ [2_000_000 + 1, 'B'], // One more byte that the message chunk size // General Cases - // TODO Add tests for huge files after https://github.com/wasm-forge/stable-fs/issues/2 is resolved [1, 'KiB'], [10, 'KiB'], [100, 'KiB'], @@ -24,27 +24,26 @@ const autoGenAutoUploadTests: [number, Unit][] = [ [1, 'GiB'] ]; -const permanentFilesTests: [string][] = [ - // Permanent Assets - ['photos/people/george-washington.tif'], - ['photos/places/dinosaurNM.jpg'], - ['photos/places/slc.jpg'], - ['photos/things/book.jpg'], - ['photos/things/utah-teapot.jpg'], - ['text/subfolder/deep-sub-folder/deep.txt'], - ['text/subfolder/sibling-deep-sub-folder/deep.txt'], - ['text/subfolder/other-thing.txt'], - ['text/thing.txt'], - ['text/thing.txt'] +const permanentFiles: string[] = [ + 'photos/people/george-washington.tif', + 'photos/places/dinosaurNM.jpg', + 'photos/places/slc.jpg', + 'photos/things/book.jpg', + 'photos/things/utah-teapot.jpg', + 'text/subfolder/deep-sub-folder/deep.txt', + 'text/subfolder/sibling-deep-sub-folder/deep.txt', + 'text/subfolder/other-thing.txt', + 'text/thing.txt', + 'text/thing.txt' ]; -const fileRenameDuringUploadTests: [string, string][] = - // TODO this is a bit of a pickle one - [['text/single.txt', 
'single_asset.txt']]; +const renamedPermanentFiles: [string, string][] = [ + ['text/single.txt', 'single_asset.txt'] +]; export function generateFiles(): Test { return () => { - describe.each(autoGenAutoUploadTests)( + describe.each(autoGenAutoUploadFileInfo)( 'prepare auto generated files locally for upload', (size, units) => { please( @@ -69,25 +68,29 @@ export function generateFiles(): Test { */ export function getDfxConfigFileTests(origin: string): Test { return () => { - describe.each(permanentFilesTests)( - 'permanent files with same names locally as on canister', + describe.each(permanentFiles)( + 'permanent files with same names locally as on canister that were uploaded as canister was deployed', (canisterPath) => { - verifyUpload(origin, canisterPath, 'permanent'); + verifyUpload( + origin, + join('permanent', canisterPath), + canisterPath + ); } ); - describe.each(fileRenameDuringUploadTests)( - 'permanent files with different names locally than on canister', + describe.each(renamedPermanentFiles)( + 'permanent files with different names locally than on canister that were uploaded as canister was deployed', (canisterPath, localPath) => { - verifyUpload(origin, canisterPath, undefined, localPath); + verifyUpload(origin, localPath, canisterPath); } ); - describe.each(autoGenAutoUploadTests)( + describe.each(autoGenAutoUploadFileInfo)( 'auto generated files that were uploaded as canister was deployed', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); - verifyUpload(origin, fileName, 'auto'); + verifyUpload(origin, 'auto', fileName); } ); }; diff --git a/examples/large_files/test/huge_file_tests.ts b/examples/large_files/test/huge_file_tests.ts index 18d29e61d6..7fd0cf42fd 100644 --- a/examples/large_files/test/huge_file_tests.ts +++ b/examples/large_files/test/huge_file_tests.ts @@ -8,7 +8,7 @@ import { Unit } from '../../../scripts/file_generator'; import { generateTestFileOfSize } from './generate_test_files'; import { 
getAutoGeneratedFileName, verifyUpload } from './tests'; -const hugeAutoGenAutoUploadTests: [number, Unit][] = [[2, 'GiB']]; +const hugeAutoGenAutoUploadFileInfo: [number, Unit][] = [[2, 'GiB']]; export function hugeFilesTests(origin: string): Test { return () => { @@ -22,7 +22,7 @@ export function hugeFilesTests(origin: string): Test { } ); - describe.each(hugeAutoGenAutoUploadTests)( + describe.each(hugeAutoGenAutoUploadFileInfo)( 'generate huge files', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); @@ -45,11 +45,11 @@ export function hugeFilesTests(origin: string): Test { } ); - describe.each(hugeAutoGenAutoUploadTests)( + describe.each(hugeAutoGenAutoUploadFileInfo)( 'verify huge files were uploaded correctly', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); - verifyUpload(origin, fileName, 'auto', undefined); + verifyUpload(origin, 'auto', fileName); } ); }; diff --git a/examples/large_files/test/manual_tests.ts b/examples/large_files/test/manual_tests.ts index b6388ddf96..ac93b97ee4 100644 --- a/examples/large_files/test/manual_tests.ts +++ b/examples/large_files/test/manual_tests.ts @@ -8,10 +8,10 @@ import { generateTestFileOfSize } from './generate_test_files'; import { getAutoGeneratedFileName, verifyUpload } from './tests'; export function manualTests(origin: string): Test { - const autoGenManualUploadTests: [number, Unit][] = [[150, 'MiB']]; + const autoGenManualUploadFileInfo: [number, Unit][] = [[150, 'MiB']]; return () => { - describe.each(autoGenManualUploadTests)( + describe.each(autoGenManualUploadFileInfo)( 'prepare auto generated files locally for manual upload', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); @@ -20,12 +20,12 @@ export function manualTests(origin: string): Test { async () => { await generateTestFileOfSize(size, units, 'manual'); }, - 2 * 60 * 1_000 + 5 * 60 * 1_000 ); } ); - describe.each(autoGenManualUploadTests)( + 
describe.each(autoGenManualUploadFileInfo)( 'initial manual upload of auto files', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); @@ -50,11 +50,11 @@ export function manualTests(origin: string): Test { } ); - describe.each(autoGenManualUploadTests)( + describe.each(autoGenManualUploadFileInfo)( 'initial manual upload of auto files', (size, units) => { const fileName = getAutoGeneratedFileName(size, units); - verifyUpload(origin, fileName, 'manual'); + verifyUpload(origin, 'manual', fileName); } ); }; diff --git a/examples/large_files/test/pretest.ts b/examples/large_files/test/pretest.ts index b142fe0ab0..355ccae484 100644 --- a/examples/large_files/test/pretest.ts +++ b/examples/large_files/test/pretest.ts @@ -5,6 +5,10 @@ async function pretest() { stdio: 'inherit' }); + // Since a lot of this test revolves around the upload process and the post + // install scripts the call to dfx deploy is inside the tests. Canister + // create is called here to make sure we have the right canister id for the + // origin execSync(`dfx canister create backend`); } diff --git a/examples/large_files/test/tests.ts b/examples/large_files/test/tests.ts index a8946e54cc..952d241924 100644 --- a/examples/large_files/test/tests.ts +++ b/examples/large_files/test/tests.ts @@ -5,7 +5,8 @@ import { createActor } from 'azle/src/compiler/file_uploader/uploader_actor'; import { expect, it, please, Test } from 'azle/test/jest'; import { execSync } from 'child_process'; import { rm } from 'fs/promises'; -import { join } from 'path'; +import { stat } from 'fs/promises'; +import { basename, join } from 'path'; import { Unit } from '../../../scripts/file_generator'; import { AZLE_UPLOADER_IDENTITY_NAME } from '../../../src/compiler/uploader_identity'; @@ -22,10 +23,18 @@ export function getTests(canisterId: string): Test { beforeAll(async () => { // Ensure all files from previous runs are cleared out await rm(join('assets', 'auto'), { recursive: true, force: true }); + 
await rm(join('assets', 'manual'), { + recursive: true, + force: true + }); }); afterAll(async () => { // Clear out files from this run await rm(join('assets', 'auto'), { recursive: true, force: true }); + await rm(join('assets', 'manual'), { + recursive: true, + force: true + }); }); describe('generate files', generateFiles()); @@ -45,20 +54,13 @@ export function getTests(canisterId: string): Test { describe('authorization tests', getAuthorizationTests()); describe( - 'verify files specified in dfx.json exists after initial deploy', + 'verify files specified in dfx.json exist after initial deploy', getDfxConfigFileTests(origin) ); please( 'modify files and redeploy', async () => { - // TODO Upgrading is not working at the moment. This is the work around. - // TODO We the post install script runs it errors because the files already exist. - // TODO look into why those files seem to still exist - execSync(`dfx canister uninstall-code backend || true`, { - stdio: 'inherit' - }); - await generateTestFileOfSize(1, 'KiB'); await generateTestFileOfSize(10, 'KiB'); await generateTestFileOfSize(100, 'KiB'); @@ -95,6 +97,10 @@ export function getAutoGeneratedFileName(size: number, units: Unit): string { * will be inserted between "assets" and the rest of the file path to the local * asset. If localPath is defined it will be used for the localPath. Otherwise * it will be assumed that the canisterPath is the same as the localPath. + * + * TODO if this works out update this comment to say that the destPath must be + * a full file path but that src can be a directory... 
+ * * @param origin * @param canisterPath * @param localDir @@ -103,23 +109,19 @@ export function getAutoGeneratedFileName(size: number, units: Unit): string { */ export function verifyUpload( origin: string, - canisterPath: string, - localDir?: string, - localPath?: string + srcPath: string, + destPath: string ) { - it(`uploads and hashes ${canisterPath}`, async () => { - const canisterFilePath = join('assets', canisterPath); - const localFilePath = join( - 'assets', - localDir ?? '', - localPath ?? canisterPath - ); + it(`uploads and hashes ${srcPath}`, async () => { + const localPath = await getLocalPath(join('assets', srcPath), destPath); + console.log(`Src: ${srcPath} => Local: ${localPath}`); + const canisterPath = join('assets', destPath); + console.log(`Src: ${destPath} => Local: ${canisterPath}`); + console.log('THIS IS THE CANISTER PATH', canisterPath); - const expectedHash = (await hashFile(localFilePath)).toString('hex'); + const expectedHash = (await hashFile(localPath)).toString('hex'); - const response = await fetch( - `${origin}/exists?path=${canisterFilePath}` - ); + const response = await fetch(`${origin}/exists?path=${canisterPath}`); const exists = await response.json(); expect(exists).toBe(true); @@ -128,8 +130,17 @@ export function verifyUpload( getCanisterId('backend'), AZLE_UPLOADER_IDENTITY_NAME ); - const hash = await actor.get_file_hash(canisterFilePath); + const hash = await actor.get_file_hash(canisterPath); expect(hash).toStrictEqual([expectedHash]); }); } + +async function getLocalPath(srcPath: string, dest: string): Promise<string> { + const srcStats = await stat(srcPath); + if (srcStats.isDirectory()) { + const fileName = basename(dest); + return join(srcPath, fileName); + } + return srcPath; +}