diff --git a/.changeset/eight-schools-repair.md b/.changeset/eight-schools-repair.md new file mode 100644 index 00000000..5f77509d --- /dev/null +++ b/.changeset/eight-schools-repair.md @@ -0,0 +1,5 @@ +--- +"@cartesi/cli": major +--- + +build based on cartesi.toml diff --git a/apps/cli/.gitignore b/apps/cli/.gitignore index 7f3d26c3..06f62fa3 100644 --- a/apps/cli/.gitignore +++ b/apps/cli/.gitignore @@ -9,3 +9,4 @@ node_modules oclif.manifest.json src/contracts.ts src/graphql/ +test/builder/output diff --git a/apps/cli/package.json b/apps/cli/package.json index 4055cb85..3332d448 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -37,6 +37,7 @@ "ora": "^8.1.0", "progress-stream": "^2.0.0", "semver": "^7.6.3", + "smol-toml": "^1.3.0", "tmp": "^0.2.3", "viem": "^2.21.27" }, diff --git a/apps/cli/src/baseCommand.ts b/apps/cli/src/baseCommand.ts index dc0f243f..cf618c45 100644 --- a/apps/cli/src/baseCommand.ts +++ b/apps/cli/src/baseCommand.ts @@ -5,6 +5,7 @@ import fs from "fs"; import path from "path"; import { Address, Hash, getAddress, isHash } from "viem"; +import { Config, parse } from "./config.js"; import { applicationFactoryAddress, authorityFactoryAddress, @@ -53,6 +54,12 @@ export abstract class BaseCommand extends Command { return path.join(".cartesi", ...paths); } + protected getApplicationConfig(configPath: string): Config { + return fs.existsSync(configPath) + ? parse(fs.readFileSync(configPath).toString()) + : parse(""); + } + protected getMachineHash(): Hash | undefined { // read hash of the cartesi machine snapshot, if one exists const hashPath = this.getContextPath("image", "hash"); diff --git a/apps/cli/src/builder/directory.ts b/apps/cli/src/builder/directory.ts new file mode 100644 index 00000000..f67dedbd --- /dev/null +++ b/apps/cli/src/builder/directory.ts @@ -0,0 +1,45 @@ +import fs from "fs-extra"; +import path from "path"; +import { DirectoryDriveConfig } from "../config.js"; +import { genext2fs, mksquashfs } from "../exec/index.js"; + +export const build = async ( + name: string, + drive: DirectoryDriveConfig, + sdkImage: string, + destination: string, +): Promise => { + const filename = `${name}.${drive.format}`; + + // copy directory to destination + const dest = path.join(destination, name); + await fs.mkdirp(dest); + await fs.copy(drive.directory, dest); + + try { + switch (drive.format) { + case "ext2": { + await genext2fs.fromDirectory({ + extraSize: drive.extraSize, + input: name, + output: filename, + cwd: destination, + image: sdkImage, + }); + break; + } + case "sqfs": { + await mksquashfs.fromDirectory({ + input: name, + output: filename, + cwd: destination, + image: sdkImage, + }); + break; + } + } + } finally { + // delete copied + await fs.remove(dest); + } +}; diff --git a/apps/cli/src/builder/docker.ts b/apps/cli/src/builder/docker.ts new file mode 100644 index 00000000..98826482 --- /dev/null +++ b/apps/cli/src/builder/docker.ts @@ -0,0 +1,138 @@ +import { execa } from "execa"; +import fs from "fs-extra"; +import path from "path"; +import tmp from "tmp"; +import { DockerDriveConfig } from "../config.js"; +import { crane, genext2fs, mksquashfs } from "../exec/index.js"; + +type ImageBuildOptions = Pick< + DockerDriveConfig, + "context" | "dockerfile" | "tags" | "target" +>; + +type ImageInfo = { + cmd: string[]; + entrypoint: string[]; + env: string[]; + workdir: string; +}; + +/** + * Build Docker image (linux/riscv64). Returns image id. 
+ */ +const buildImage = async (options: ImageBuildOptions): Promise => { + const { context, dockerfile, tags, target } = options; + const buildResult = tmp.tmpNameSync(); + const args = [ + "buildx", + "build", + "--file", + dockerfile, + "--load", + "--iidfile", + buildResult, + context, + ]; + + // set tags for the image built + args.push(...tags.map((tag) => ["--tag", tag]).flat()); + + if (target) { + args.push("--target", target); + } + + await execa("docker", args, { stdio: "inherit" }); + return fs.readFileSync(buildResult, "utf8"); +}; + +/** + * Query the image using docker image inspect + * @param image image id or name + * @returns Information about the image + */ +const getImageInfo = async (image: string): Promise => { + const { stdout: jsonStr } = await execa("docker", [ + "image", + "inspect", + image, + ]); + // parse image info from docker inspect output + const [imageInfo] = JSON.parse(jsonStr); + + // validate image architecture (must be riscv64) + if (imageInfo["Architecture"] !== "riscv64") { + throw new Error( + `Invalid image Architecture: ${imageInfo["Architecture"]}. Expected riscv64`, + ); + } + + const info: ImageInfo = { + cmd: imageInfo["Config"]["Cmd"] ?? [], + entrypoint: imageInfo["Config"]["Entrypoint"] ?? [], + env: imageInfo["Config"]["Env"] || [], + workdir: imageInfo["Config"]["WorkingDir"], + }; + + return info; +}; + +export const build = async ( + name: string, + drive: DockerDriveConfig, + sdkImage: string, + destination: string, +): Promise => { + const { format } = drive; + + const ocitar = `${name}.oci.tar`; + const tar = `${name}.tar`; + const filename = `${name}.${format}`; + + // use pre-existing image or build docker image + const image = drive.image || (await buildImage(drive)); + + // get image info + const imageInfo = await getImageInfo(image); + + try { + // create OCI Docker tarball from Docker image + await execa("docker", ["image", "save", image, "-o", ocitar], { + cwd: destination, + }); + + // create rootfs tar from OCI tar + await crane.exportImage({ + stdin: fs.openSync(path.join(destination, ocitar), "r"), + stdout: fs.openSync(path.join(destination, tar), "w"), + image: sdkImage, + }); + + switch (format) { + case "ext2": { + await genext2fs.fromTar({ + extraSize: drive.extraSize, + input: tar, + output: filename, + cwd: destination, + image: sdkImage, + }); + break; + } + case "sqfs": { + await mksquashfs.fromTar({ + input: path.join(destination, tar), + output: filename, + cwd: destination, + image: sdkImage, + }); + break; + } + } + } finally { + // delete intermediate files + await fs.remove(path.join(destination, ocitar)); + await fs.remove(path.join(destination, tar)); + } + + return imageInfo; +}; diff --git a/apps/cli/src/builder/empty.ts b/apps/cli/src/builder/empty.ts new file mode 100644 index 00000000..1e343a61 --- /dev/null +++ b/apps/cli/src/builder/empty.ts @@ -0,0 +1,31 @@ +import fs from "fs-extra"; +import path from "path"; +import { EmptyDriveConfig } from "../config.js"; +import { genext2fs } from "../exec/index.js"; + +export const build = async ( + name: string, + drive: EmptyDriveConfig, + sdkImage: string, + destination: string, +): Promise => { + const filename = `${name}.${drive.format}`; + switch (drive.format) { + case "ext2": { + await genext2fs.empty({ + output: filename, + size: drive.size, + cwd: destination, + image: sdkImage, + }); + break; + } + case "raw": { + await fs.writeFile( + path.join(destination, filename), + Buffer.alloc(drive.size), + ); + break; + } + } +}; diff --git 
a/apps/cli/src/builder/index.ts b/apps/cli/src/builder/index.ts new file mode 100644 index 00000000..cf1a5225 --- /dev/null +++ b/apps/cli/src/builder/index.ts @@ -0,0 +1,5 @@ +export { build as buildDirectory } from "./directory.js"; +export { build as buildDocker } from "./docker.js"; +export { build as buildEmpty } from "./empty.js"; +export { build as buildNone } from "./none.js"; +export { build as buildTar } from "./tar.js"; diff --git a/apps/cli/src/builder/none.ts b/apps/cli/src/builder/none.ts new file mode 100644 index 00000000..2e0d8fea --- /dev/null +++ b/apps/cli/src/builder/none.ts @@ -0,0 +1,17 @@ +import fs from "fs-extra"; +import path from "path"; +import { ExistingDriveConfig, getDriveFormat } from "../config.js"; + +export const build = async ( + name: string, + drive: ExistingDriveConfig, + destination: string, +): Promise => { + // no need to build, drive already exists + const src = drive.filename; + const format = getDriveFormat(src); + const filename = path.join(destination, `${name}.${format}`); + + // just copy it + await fs.copyFile(src, filename); +}; diff --git a/apps/cli/src/builder/tar.ts b/apps/cli/src/builder/tar.ts new file mode 100644 index 00000000..9f9e13b2 --- /dev/null +++ b/apps/cli/src/builder/tar.ts @@ -0,0 +1,39 @@ +import fs from "fs-extra"; +import path from "path"; +import { TarDriveConfig } from "../config.js"; +import { genext2fs, mksquashfs } from "../exec/index.js"; + +export const build = async ( + name: string, + drive: TarDriveConfig, + sdkImage: string, + destination: string, +): Promise => { + const tar = `${name}.tar`; + const filename = `${name}.${drive.format}`; + + // copy input tar to destination directory (with drive name) + await fs.copy(drive.filename, path.join(destination, tar)); + + switch (drive.format) { + case "ext2": { + await genext2fs.fromTar({ + extraSize: drive.extraSize, + input: tar, + output: filename, + cwd: destination, + image: sdkImage, + }); + break; + } + case "sqfs": { + await mksquashfs.fromTar({ + input: path.join(destination, tar), + output: filename, + cwd: destination, + image: sdkImage, + }); + break; + } + } +}; diff --git a/apps/cli/src/commands/build.ts b/apps/cli/src/commands/build.ts index 33710810..62d5b7be 100644 --- a/apps/cli/src/commands/build.ts +++ b/apps/cli/src/commands/build.ts @@ -1,312 +1,104 @@ import { Flags } from "@oclif/core"; -import bytes from "bytes"; -import { execa } from "execa"; import fs from "fs-extra"; -import semver from "semver"; +import path from "path"; import tmp from "tmp"; - import { BaseCommand } from "../baseCommand.js"; -import { DEFAULT_TEMPLATES_BRANCH } from "./create.js"; - -type ImageBuildOptions = { - target?: string; -}; - -type ImageInfo = { - cmd: string[]; - dataSize: string; - entrypoint: string[]; - env: string[]; - ramSize: string; - sdkVersion: string; - sdkName: string; - workdir: string; +import { + buildDirectory, + buildDocker, + buildEmpty, + buildNone, + buildTar, +} from "../builder/index.js"; +import { DriveConfig, DriveResult } from "../config.js"; +import { bootMachine } from "../machine.js"; + +const buildDrive = async ( + name: string, + drive: DriveConfig, + sdkImage: string, + destination: string, +): Promise => { + switch (drive.builder) { + case "directory": { + return buildDirectory(name, drive, sdkImage, destination); + } + case "docker": { + return buildDocker(name, drive, sdkImage, destination); + } + case "empty": { + return buildEmpty(name, drive, sdkImage, destination); + } + case "tar": { + return buildTar(name, drive, 
sdkImage, destination); + } + case "none": { + return buildNone(name, drive, destination); + } + } }; -const CARTESI_LABEL_PREFIX = "io.cartesi.rollups"; -const CARTESI_LABEL_RAM_SIZE = `${CARTESI_LABEL_PREFIX}.ram_size`; -const CARTESI_LABEL_DATA_SIZE = `${CARTESI_LABEL_PREFIX}.data_size`; -const CARTESI_DEFAULT_RAM_SIZE = "128Mi"; - -const CARTESI_LABEL_SDK_VERSION = `${CARTESI_LABEL_PREFIX}.sdk_version`; -const CARTESI_LABEL_SDK_NAME = `${CARTESI_LABEL_PREFIX}.sdk_name`; -const CARTESI_DEFAULT_SDK_VERSION = "0.12.0-alpha.0"; - -export default class BuildApplication extends BaseCommand< - typeof BuildApplication -> { +export default class Build extends BaseCommand { static summary = "Build application."; static description = - "Build application starting from a Dockerfile and ending with a snapshot of the corresponding Cartesi Machine already booted and yielded for the first time. This snapshot can be used to start a Cartesi node for the application using `run`. The process can also start from a Docker image built by the developer using `docker build` using the option `--from-image`"; + "Build application by building Cartesi machine drives, configuring a machine and booting it"; - static examples = [ - "<%= config.bin %> <%= command.id %>", - "<%= config.bin %> <%= command.id %> --from-image my-app", - ]; - - static args = {}; + static examples = ["<%= config.bin %> <%= command.id %>"]; static flags = { - "from-image": Flags.string({ - summary: "skip docker build and start from this image.", - description: - "if the build process of the application Dockerfile needs more control the developer can build the image using the `docker build` command, and then start the build process of the Cartesi machine starting from that image.", + config: Flags.file({ + char: "c", + default: "cartesi.toml", + summary: "path to the configuration file", }), - target: Flags.string({ - summary: "target of docker multi-stage build.", - description: - "if the application Dockerfile uses a multi-stage strategy, and stage of the image to be exported as a Cartesi machine is not the last stage, use this parameter to specify the target stage.", + "drives-only": Flags.boolean({ + default: false, + summary: "only build drives", }), }; - /** - * Build DApp image (linux/riscv64). Returns image id. - * @param directory path of context containing Dockerfile - */ - private async buildImage(options: ImageBuildOptions): Promise { - const buildResult = tmp.tmpNameSync(); - this.debug( - `building docker image and writing result to ${buildResult}`, - ); - const args = ["buildx", "build", "--load", "--iidfile", buildResult]; - if (options.target) { - args.push("--target", options.target); - } - - await execa("docker", [...args, process.cwd()], { stdio: "inherit" }); - return fs.readFileSync(buildResult, "utf8"); - } - - private async getImageInfo(image: string): Promise { - const { stdout: jsonStr } = await execa("docker", [ - "image", - "inspect", - image, - ]); - // parse image info from docker inspect output - const [imageInfo] = JSON.parse(jsonStr); - - // validate image architecture (must be riscv64) - if (imageInfo["Architecture"] !== "riscv64") { - throw new Error( - `Invalid image Architecture: ${imageInfo["Architecture"]}. Expected riscv64`, - ); - } - - const labels = imageInfo["Config"]["Labels"] || {}; - const info: ImageInfo = { - cmd: imageInfo["Config"]["Cmd"] ?? [], - dataSize: labels[CARTESI_LABEL_DATA_SIZE] ?? "10Mb", - entrypoint: imageInfo["Config"]["Entrypoint"] ?? 
[], - env: imageInfo["Config"]["Env"] || [], - ramSize: labels[CARTESI_LABEL_RAM_SIZE] ?? CARTESI_DEFAULT_RAM_SIZE, - sdkName: labels[CARTESI_LABEL_SDK_NAME] ?? "cartesi/sdk", - sdkVersion: - labels[CARTESI_LABEL_SDK_VERSION] ?? - CARTESI_DEFAULT_SDK_VERSION, - workdir: imageInfo["Config"]["WorkingDir"], - }; - - if (!info.entrypoint && !info.cmd) { - throw new Error("Undefined image ENTRYPOINT or CMD"); - } - - // fail if using unsupported sdk version - if (!semver.valid(info.sdkVersion)) { - this.warn("sdk version is not a valid semver"); - } else if ( - info.sdkName == "cartesi/sdk" && - semver.lt(info.sdkVersion, CARTESI_DEFAULT_SDK_VERSION) - ) { - throw new Error(`Unsupported sdk version: ${info.sdkVersion} (used) < ${CARTESI_DEFAULT_SDK_VERSION} (minimum). -Update your application Dockerfile using one of the templates at https://github.com/cartesi/application-templates/tree/${DEFAULT_TEMPLATES_BRANCH} -`); - } - - // warn for using default values - info.sdkVersion || - this.warn( - `Undefined ${CARTESI_LABEL_SDK_VERSION} label, defaulting to ${CARTESI_DEFAULT_SDK_VERSION}`, - ); - - info.ramSize || - this.warn( - `Undefined ${CARTESI_LABEL_RAM_SIZE} label, defaulting to ${CARTESI_DEFAULT_RAM_SIZE}`, - ); - - // validate data size value - if (bytes(info.dataSize) === null) { - throw new Error( - `Invalid ${CARTESI_LABEL_DATA_SIZE} value: ${info.dataSize}`, - ); - } - - // XXX: validate other values - - return info; - } - - // saves the OCI Image to a tarball - private async createTarball( - image: string, - outputFilePath: string, - ): Promise { - // create docker tarball from app image - await execa("docker", ["image", "save", image, "-o", outputFilePath]); - } - - // this wraps the call to the sdk image with a one-shot approach - // the (inputPath, outputPath) signature will mount the input as a volume and copy the output with docker cp - private async sdkRun( - sdkImage: string, - cmd: string[], - inputPath: string, - outputPath: string, - ): Promise { - const { stdout: cid } = await execa("docker", [ - "container", - "create", - "--volume", - `./${inputPath}:/tmp/input`, - sdkImage, - ...cmd, - ]); - - await execa("docker", ["container", "start", "-a", cid], { - stdio: "inherit", - }); - - await execa("docker", [ - "container", - "cp", - `${cid}:/tmp/output`, - outputPath, - ]); - - await execa("docker", ["container", "stop", cid]); - await execa("docker", ["container", "rm", cid]); - } - - // returns the command to create rootfs tarball from an OCI Image tarball - private static createRootfsTarCommand(): string[] { - const cmd = [ - "cat", - "/tmp/input", - "|", - "crane", - "export", - "-", // OCI Image from stdin - "-", // rootfs tarball to stdout - "|", - "bsdtar", - "-cf", - "/tmp/output", - "--format=gnutar", - "@/dev/stdin", // rootfs tarball from stdin - ]; - return ["/usr/bin/env", "bash", "-c", cmd.join(" ")]; - } - - // returns the command to create ext2 from a rootfs - private static createExt2Command(extraBytes: number): string[] { - const blockSize = 4096; - const extraBlocks = Math.ceil(extraBytes / blockSize); - const extraSize = `+${extraBlocks}`; - - return [ - "xgenext2fs", - "--tarball", - "/tmp/input", - "--block-size", - blockSize.toString(), - "--faketime", - "-r", - extraSize, - "/tmp/output", - ]; - } - - private static createMachineSnapshotCommand(info: ImageInfo): string[] { - const ramSize = info.ramSize; - const driveLabel = "root"; // XXX: does this need to be customizable? 
- - // list of environment variables of docker image - const envs = info.env.map((variable) => `--env=${variable}`); - - // ENTRYPOINT and CMD as a space separated string - const entrypoint = [...info.entrypoint, ...info.cmd].join(" "); - - // command to change working directory if WORKDIR is defined - const cwd = info.workdir ? `--workdir=${info.workdir}` : ""; - return [ - "create_machine_snapshot", - `--ram-length=${ramSize}`, - `--drive-label=${driveLabel}`, - `--drive-filename=/tmp/input`, - `--output=/tmp/output`, - cwd, - ...envs, - `--entrypoint=${entrypoint}`, - ]; - } - public async run(): Promise { - const { flags } = await this.parse(BuildApplication); - - const snapshotPath = this.getContextPath("image"); - const tarPath = this.getContextPath("image.tar"); - const gnuTarPath = this.getContextPath("image.gnutar"); - const ext2Path = this.getContextPath("image.ext2"); + const { flags } = await this.parse(Build); // clean up temp files we create along the process tmp.setGracefulCleanup(); - // use pre-existing image or build dapp image - const appImage = flags["from-image"] || (await this.buildImage(flags)); - - // prepare context directory - await fs.emptyDir(this.getContextPath()); // XXX: make it less error prone + // get application configuration from 'cartesi.toml' + const config = this.getApplicationConfig(flags.config); - // get and validate image info - const imageInfo = await this.getImageInfo(appImage); + // destination directory for image and intermediate files + const destination = path.resolve(this.getContextPath()); - // resolve sdk version - const sdkImage = `${imageInfo.sdkName}:${imageInfo.sdkVersion}`; + // prepare context directory + await fs.emptyDir(destination); // XXX: make it less error prone + + // start build of all drives simultaneously + const results = Object.entries(config.drives).reduce< + Record> + >((acc, [name, drive]) => { + acc[name] = buildDrive(name, drive, config.sdk, destination); + return acc; + }, {}); + + // await for all drives to be built + await Promise.all(Object.values(results)); + + if (flags["drives-only"]) { + // only build drives, so quit here + return; + } - try { - // create docker tarball for image specified - await this.createTarball(appImage, tarPath); + // get image info of root drive + const root = await results["root"]; + const imageInfo = root || undefined; - // create rootfs tar - await this.sdkRun( - sdkImage, - BuildApplication.createRootfsTarCommand(), - tarPath, - gnuTarPath, - ); + // path of machine snapshot + const snapshotPath = this.getContextPath("image"); - // create ext2 - await this.sdkRun( - sdkImage, - BuildApplication.createExt2Command( - bytes.parse(imageInfo.dataSize), - ), - gnuTarPath, - ext2Path, - ); + // create machine snapshot + await bootMachine(config, imageInfo, destination); - // create machine snapshot - await this.sdkRun( - sdkImage, - BuildApplication.createMachineSnapshotCommand(imageInfo), - ext2Path, - snapshotPath, - ); - await fs.chmod(snapshotPath, 0o755); - } finally { - await fs.remove(gnuTarPath); - await fs.remove(tarPath); - } + await fs.chmod(snapshotPath, 0o755); } } diff --git a/apps/cli/src/commands/shell.ts b/apps/cli/src/commands/shell.ts index 3c8a928b..c7612854 100644 --- a/apps/cli/src/commands/shell.ts +++ b/apps/cli/src/commands/shell.ts @@ -1,9 +1,9 @@ import { Args, Flags } from "@oclif/core"; -import { execa } from "execa"; import fs from "fs-extra"; -import { lookpath } from "lookpath"; import path from "path"; import { BaseCommand } from "../baseCommand.js"; 
+import { ImageInfo } from "../config.js"; +import { bootMachine } from "../machine.js"; export default class Shell extends BaseCommand { static description = "Start a shell in cartesi machine of application"; @@ -18,62 +18,64 @@ export default class Shell extends BaseCommand { }; static flags = { + command: Flags.string({ + default: "/bin/sh", + description: "shell command to run", + summary: "shell to run", + }), + config: Flags.file({ + char: "c", + default: "cartesi.toml", + summary: "path to the configuration file", + }), "run-as-root": Flags.boolean({ - description: "run as root user", default: false, + description: "run as root user", + summary: "run the cartesi machine as the root user", }), }; - private async startShell( - ext2Path: string, - runAsRoot: boolean, - ): Promise { - const containerDir = "/mnt"; - const bind = `${path.resolve(path.dirname(ext2Path))}:${containerDir}`; - const ext2 = path.join(containerDir, path.basename(ext2Path)); - const ramSize = "128Mi"; - const driveLabel = "root"; - const sdkImage = "cartesi/sdk:0.10.0"; // XXX: how to resolve sdk version? - const args = [ - "run", - "--interactive", - "--tty", - "--volume", - bind, - sdkImage, - "cartesi-machine", - `--ram-length=${ramSize}`, - "--append-bootargs=no4lvl", - `--flash-drive=label:${driveLabel},filename:${ext2}`, - ]; + public async run(): Promise { + const { flags } = await this.parse(Shell); - if (runAsRoot) { - args.push("--append-init=USER=root"); - } + // get application configuration from 'cartesi.toml' + const config = this.getApplicationConfig(flags.config); - if (!(await lookpath("stty"))) { - args.push("-i"); - } else { - args.push("-it"); + // destination directory for image and intermediate files + const destination = path.resolve(this.getContextPath()); + + // check if all drives are built + for (const [name, drive] of Object.entries(config.drives)) { + const filename = `${name}.${drive.format}`; + const pathname = this.getContextPath(filename); + if (!fs.existsSync(pathname)) { + throw new Error( + `drive '${name}' not built, run '${this.config.bin} build'`, + ); + } } - await execa("docker", [...args, "--", "/bin/bash"], { - stdio: "inherit", - }); - } + // create shell entrypoint + const info: ImageInfo = { + cmd: [], + entrypoint: [this.flags.command], + env: [], + workdir: "/", + }; - public async run(): Promise { - const { flags } = await this.parse(Shell); + // start with interactive mode on + config.machine.interactive = true; - // use pre-existing image or build dapp image - const ext2Path = this.getContextPath("image.ext2"); - if (!fs.existsSync(ext2Path)) { - throw new Error( - `machine not built, run '${this.config.bin} build'`, - ); - } + // interactive mode can't have final hash + config.machine.finalHash = false; + + // do not store machine in interactive mode + config.machine.store = undefined; + + // run as root if flag is set + config.machine.user = flags["run-as-root"] ? 
"root" : undefined; - // execute the machine and save snapshot - await this.startShell(ext2Path, flags["run-as-root"]); + // boot machine + await bootMachine(config, info, destination); } } diff --git a/apps/cli/src/config.ts b/apps/cli/src/config.ts new file mode 100644 index 00000000..413e3d9c --- /dev/null +++ b/apps/cli/src/config.ts @@ -0,0 +1,503 @@ +import bytes from "bytes"; +import os from "os"; +import { extname } from "path"; +import { TomlPrimitive, parse as parseToml } from "smol-toml"; + +/** + * Typed Errors + */ +export class InvalidBuilderError extends Error { + constructor(builder: TomlPrimitive) { + super(`Invalid builder: ${builder}`); + this.name = "InvalidBuilder"; + } +} + +export class InvalidDriveFormatError extends Error { + constructor(format: TomlPrimitive) { + super(`Invalid drive format: ${format}`); + this.name = "InvalidDriveFormatError"; + } +} + +export class InvalidEmptyDriveFormatError extends Error { + constructor(format: TomlPrimitive) { + super(`Invalid empty drive format: ${format}`); + this.name = "InvalidEmptyDriveFormatError"; + } +} + +export class InvalidStringValueError extends Error { + constructor(value: TomlPrimitive) { + super(`Invalid string value: ${value}`); + this.name = "InvalidStringValueError"; + } +} + +export class InvalidBooleanValueError extends Error { + constructor(value: TomlPrimitive) { + super(`Invalid boolean value: ${value}`); + this.name = "InvalidBooleanValueError"; + } +} + +export class InvalidNumberValueError extends Error { + constructor(value: TomlPrimitive) { + super(`Invalid number value: ${value}`); + this.name = "InvalidNumberValueError"; + } +} + +export class InvalidBytesValueError extends Error { + constructor(value: TomlPrimitive) { + super(`Invalid bytes value: ${value}`); + this.name = "InvalidBytesValueError"; + } +} + +export class RequiredFieldError extends Error { + constructor(key: TomlPrimitive) { + super(`Missing required field: ${key}`); + this.name = "RequiredFieldError"; + } +} + +export class InvalidStringArrayError extends Error { + constructor() { + super(`Invalid string array`); + this.name = "InvalidStringArrayError"; + } +} + +/** + * Configuration for drives of a Cartesi Machine. 
A drive may already exist or be built by a builder + */ +const DEFAULT_FORMAT = "ext2"; +const DEFAULT_RAM = "128Mi"; +const DEFAULT_RAM_IMAGE_DOCKER = "/usr/share/cartesi-machine/images/linux.bin"; +const DEFAULT_RAM_IMAGE_LINUX = "/usr/share/cartesi-machine/images/linux.bin"; +const DEFAULT_RAM_IMAGE_MAC = + "/opt/homebrew/share/cartesi-machine/images/linux.bin"; +const DEFAULT_SDK = "cartesi/sdk:0.10.0"; + +type Builder = "directory" | "docker" | "empty" | "none" | "tar"; +type DriveFormat = "ext2" | "sqfs"; + +export type ImageInfo = { + cmd: string[]; + entrypoint: string[]; + env: string[]; + workdir: string; +}; + +export type DriveResult = ImageInfo | undefined | void; + +export type DirectoryDriveConfig = { + builder: "directory"; + extraSize: number; // default is 0 (no extra size) + format: DriveFormat; + directory: string; // required +}; + +export type DockerDriveConfig = { + builder: "docker"; + context: string; + dockerfile: string; + extraSize: number; // default is 0 (no extra size) + format: DriveFormat; + image?: string; // default is to build an image from a Dockerfile + tags: string[]; // default is empty array + target?: string; // default is last stage of multi-stage +}; + +export type EmptyDriveConfig = { + builder: "empty"; + format: "ext2" | "raw"; + size: number; // in bytes +}; + +export type ExistingDriveConfig = { + builder: "none"; + filename: string; // required + format: DriveFormat; +}; + +export type TarDriveConfig = { + builder: "tar"; + filename: string; // required + format: DriveFormat; + extraSize: number; // default is 0 (no extra size) +}; + +export type DriveConfig = ( + | DirectoryDriveConfig + | DockerDriveConfig + | EmptyDriveConfig + | ExistingDriveConfig + | TarDriveConfig +) & { + mount?: string | boolean; // default given by cartesi-machine + shared?: boolean; // default given by cartesi-machine + user?: string; // default given by cartesi-machine +}; + +export type MachineConfig = { + assertRollingTemplate?: boolean; // default given by cartesi-machine + bootargs: string[]; + entrypoint?: string; + finalHash: boolean; + interactive?: boolean; // default given by cartesi-machine + maxMCycle?: bigint; // default given by cartesi-machine + noRollup?: boolean; // default given by cartesi-machine + ramLength: string; + ramImage: string; + store?: string; + user?: string; // default given by cartesi-machine +}; + +export type Config = { + drives: Record; + machine: MachineConfig; + sdk: string; +}; + +type TomlTable = { [key: string]: TomlPrimitive }; + +export const defaultRootDriveConfig = (): DriveConfig => ({ + builder: "docker", + context: ".", + dockerfile: "Dockerfile", // file on current working directory + extraSize: 0, + format: DEFAULT_FORMAT, + tags: [], +}); + +export const defaultRamImage = (): string => { + switch (os.platform()) { + case "darwin": + return DEFAULT_RAM_IMAGE_MAC; + default: + return DEFAULT_RAM_IMAGE_LINUX; + } +}; + +export const defaultMachineConfig = (): MachineConfig => ({ + assertRollingTemplate: undefined, + bootargs: [], + entrypoint: undefined, + finalHash: true, + interactive: undefined, + maxMCycle: undefined, + noRollup: undefined, + ramLength: DEFAULT_RAM, + ramImage: defaultRamImage(), + store: "image", + user: undefined, +}); + +export const defaultConfig = (): Config => ({ + drives: { root: defaultRootDriveConfig() }, + machine: defaultMachineConfig(), + sdk: DEFAULT_SDK, +}); + +const parseBoolean = (value: TomlPrimitive, defaultValue: boolean): boolean => { + if (value === undefined) { + return 
defaultValue; + } else if (typeof value === "boolean") { + return value; + } + throw new InvalidBooleanValueError(value); +}; + +const parseOptionalBoolean = (value: TomlPrimitive): boolean | undefined => { + if (value === undefined) { + return undefined; + } else if (typeof value === "boolean") { + return value; + } + throw new InvalidBooleanValueError(value); +}; + +const parseString = (value: TomlPrimitive, defaultValue: string): string => { + if (value === undefined) { + return defaultValue; + } else if (typeof value === "string") { + return value; + } + throw new InvalidStringValueError(value); +}; + +const parseStringArray = (value: TomlPrimitive): string[] => { + if (value === undefined) { + return []; + } else if (typeof value === "string") { + return [value]; + } else if (typeof value === "object" && Array.isArray(value)) { + return value.map((v) => { + if (typeof v === "string") { + return v; + } + throw new InvalidStringValueError(v); + }); + } + throw new InvalidStringArrayError(); +}; + +const parseRequiredString = (value: TomlPrimitive, key: string): string => { + if (value === undefined) { + throw new RequiredFieldError(key); + } else if (typeof value === "string") { + return value; + } + throw new InvalidStringValueError(value); +}; + +const parseOptionalString = (value: TomlPrimitive): string | undefined => { + if (value === undefined) { + return undefined; + } else if (typeof value === "string") { + return value; + } + throw new InvalidStringValueError(value); +}; + +const parseOptionalStringBoolean = ( + value: TomlPrimitive, +): string | boolean | undefined => { + if (value === undefined) { + return undefined; + } else if (typeof value === "string") { + return value; + } else if (typeof value === "boolean") { + return value; + } + throw new InvalidStringValueError(value); +}; + +const parseOptionalNumber = (value: TomlPrimitive): bigint | undefined => { + if (value === undefined) { + return undefined; + } else if (typeof value === "bigint") { + return value; + } else if (typeof value === "number") { + return BigInt(value); + } + throw new InvalidNumberValueError(value); +}; + +const parseBytes = (value: TomlPrimitive, defaultValue: number): number => { + if (value === undefined) { + return defaultValue; + } else if (typeof value === "bigint") { + return Number(value); + } else if (typeof value === "number" || typeof value === "string") { + const output = bytes.parse(value); + if (output !== null) { + return output; + } + } + throw new InvalidBytesValueError(value); +}; + +const parseBuilder = (value: TomlPrimitive): Builder => { + if (value === undefined) { + return "docker"; + } else if (typeof value === "string") { + switch (value) { + case "directory": + return "directory"; + case "docker": + return "docker"; + case "empty": + return "empty"; + case "none": + return "none"; + case "tar": + return "tar"; + } + } + throw new InvalidBuilderError(value); +}; + +const parseFormat = (value: TomlPrimitive): DriveFormat => { + if (value === undefined) { + return DEFAULT_FORMAT; + } else if (typeof value === "string") { + switch (value) { + case "ext2": + return "ext2"; + case "sqfs": + return "sqfs"; + } + } + throw new InvalidDriveFormatError(value); +}; + +const parseEmptyFormat = (value: TomlPrimitive): "ext2" | "raw" => { + if (value === undefined) { + return DEFAULT_FORMAT; + } else if (typeof value === "string") { + switch (value) { + case "ext2": + return "ext2"; + case "raw": + return "raw"; + } + } + throw new InvalidEmptyDriveFormatError(value); +}; + +const 
parseMachine = (value: TomlPrimitive): MachineConfig => { + if (value === undefined) { + // default machine + return defaultMachineConfig(); + } + if (typeof value !== "object") { + throw new Error(`Invalid machine configuration: ${value}`); + } + const toml = value as TomlTable; + + return { + assertRollingTemplate: parseOptionalBoolean( + toml["assert-rolling-template"], + ), + bootargs: parseStringArray(toml.bootargs), + finalHash: parseBoolean(toml["final-hash"], true), + interactive: undefined, + maxMCycle: parseOptionalNumber(toml["max-mcycle"]), + noRollup: parseBoolean(toml["no-rollup"], false), + ramLength: parseString(toml["ram-length"], DEFAULT_RAM), + ramImage: parseString(toml["ram-image"], defaultRamImage()), + store: "image", + user: parseOptionalString(toml.user), + }; +}; + +export const getDriveFormat = (filename: string): DriveFormat => { + const extension = extname(filename); + switch (extension) { + case ".ext2": + return "ext2"; + case ".sqfs": + return "sqfs"; + default: + throw new InvalidDriveFormatError(extension); + } +}; + +const parseDrive = (drive: TomlPrimitive): DriveConfig => { + const builder = parseBuilder((drive as TomlTable).builder); + switch (builder) { + case "directory": { + const { extraSize, format, mount, directory, shared, user } = + drive as TomlTable; + return { + builder: "directory", + extraSize: parseBytes(extraSize, 0), + format: parseFormat(format), + mount: parseOptionalStringBoolean(mount), + directory: parseRequiredString(directory, "directory"), + shared: parseOptionalBoolean(shared), + user: parseOptionalString(user), + }; + } + case "docker": { + const { + context, + dockerfile, + extraSize, + format, + image, + mount, + shared, + tags, + target, + user, + } = drive as TomlTable; + return { + builder: "docker", + image: parseOptionalString(image), + context: parseString(context, "."), + dockerfile: parseString(dockerfile, "Dockerfile"), + extraSize: parseBytes(extraSize, 0), + format: parseFormat(format), + mount: parseOptionalStringBoolean(mount), + shared: parseOptionalBoolean(shared), + user: parseOptionalString(user), + tags: parseStringArray(tags), + target: parseOptionalString(target), + }; + } + case "empty": { + const { format, mount, size, shared, user } = drive as TomlTable; + return { + builder: "empty", + format: parseEmptyFormat(format), + mount: parseOptionalStringBoolean(mount), + shared: parseOptionalBoolean(shared), + size: parseBytes(size, 0), + user: parseOptionalString(user), + }; + } + case "tar": { + const { extraSize, filename, format, mount, shared, user } = + drive as TomlTable; + return { + builder: "tar", + extraSize: parseBytes(extraSize, 0), + filename: parseRequiredString(filename, "filename"), + format: parseFormat(format), + mount: parseOptionalStringBoolean(mount), + shared: parseOptionalBoolean(shared), + user: parseOptionalString(user), + }; + } + case "none": { + const { shared, mount, user } = drive as TomlTable; + const filename = parseRequiredString( + (drive as TomlTable).filename, + "filename", + ); + const format = getDriveFormat(filename); + return { + builder: "none", + filename, + format, + mount: parseOptionalStringBoolean(mount), + shared: parseOptionalBoolean(shared), + user: parseOptionalString(user), + }; + } + } +}; + +const parseDrives = (config: TomlPrimitive): Record => { + // load drives from configuration + const drives = Object.entries((config as TomlTable) ?? 
{}).reduce< + Record + >((acc, [name, drive]) => { + acc[name] = parseDrive(drive); + return acc; + }, {}); + + // check if there is a root drive + const hasRoot = drives.root !== undefined; + if (!hasRoot) { + // there is no root drive, add a default one + drives.root = defaultRootDriveConfig(); + } + return drives; +}; + +export const parse = (str: string): Config => { + const toml = parseToml(str); + + const config: Config = { + drives: parseDrives(toml.drives), + machine: parseMachine(toml.machine), + sdk: parseString(toml.sdk, DEFAULT_SDK), + }; + + return config; +}; diff --git a/apps/cli/src/machine.ts b/apps/cli/src/machine.ts new file mode 100644 index 00000000..18b629fc --- /dev/null +++ b/apps/cli/src/machine.ts @@ -0,0 +1,112 @@ +import { Config, DriveConfig, ImageInfo } from "./config.js"; +import { cartesiMachine } from "./exec/index.js"; + +const flashDrive = (label: string, drive: DriveConfig): string => { + const { format, mount, shared, user } = drive; + const filename = `${label}.${format}`; + const vars = [`label:${label}`, `filename:${filename}`]; + if (mount !== undefined) { + vars.push(`mount:${mount}`); + } + if (user) { + vars.push(`user:${user}`); + } + if (shared) { + vars.push("shared"); + } + // don't specify start and length + return `--flash-drive=${vars.join(",")}`; +}; + +export const bootMachine = async ( + config: Config, + info: ImageInfo | undefined, + destination: string, +) => { + const { machine } = config; + const { + assertRollingTemplate, + finalHash, + interactive, + maxMCycle, + noRollup, + ramLength, + ramImage, + store, + user, + } = machine; + + // list of environment variables of docker image + const env = info?.env ?? []; + const envs = env.map((variable) => `--env=${variable}`); + + // check if we need a rootfstype boot arg + const root = config.drives.root; + if (root?.format === "sqfs") { + const definedRootfsType = config.machine.bootargs.find((arg) => + arg.startsWith("rootfstype="), + ); + // not checking here if user intentionally defined wrong type + if (!definedRootfsType) { + config.machine.bootargs.push("rootfstype=squashfs"); + } + } + + // bootargs from config string array + const bootargs = machine.bootargs.map( + (arg) => `--append-bootargs="${arg}"`, + ); + + // entrypoint from config or image info (Docker ENTRYPOINT + CMD) + const entrypoint = + machine.entrypoint ?? // takes priority + (info ? 
[...info.entrypoint, ...info.cmd].join(" ") : undefined); // ENTRYPOINT and CMD as a space separated string + + if (!entrypoint) { + throw new Error("Undefined machine entrypoint"); + } + + const flashDrives = Object.entries(config.drives).map(([label, drive]) => + flashDrive(label, drive), + ); + + // command to change working directory if WORKDIR is defined + const args = [ + ...bootargs, + ...envs, + ...flashDrives, + `--ram-image=${ramImage}`, + `--ram-length=${ramLength}`, + `--append-entrypoint=${entrypoint}`, + ]; + if (assertRollingTemplate) { + args.push("--assert-rolling-template"); + } + if (finalHash) { + args.push("--final-hash"); + } + if (info?.workdir) { + args.push(`--workdir="${info.workdir}"`); + } + if (interactive) { + args.push("-it"); + } + if (noRollup) { + args.push("--no-rollup"); + } + if (maxMCycle) { + args.push(`--max-mcycle=${maxMCycle.toString()}`); + } + if (store) { + args.push(`--store=${store}`); + } + if (user) { + args.push(`--user=${user}`); + } + + return cartesiMachine.boot(args, { + cwd: destination, + image: config.sdk, + stdio: "inherit", + }); +}; diff --git a/apps/cli/test/builder/data/Dockerfile b/apps/cli/test/builder/data/Dockerfile new file mode 100644 index 00000000..713be177 --- /dev/null +++ b/apps/cli/test/builder/data/Dockerfile @@ -0,0 +1,5 @@ +FROM --platform=linux/riscv64 ubuntu:22.04 AS test +ADD ./file2 . + +FROM --platform=linux/riscv64 ubuntu:22.04 +ADD ./file1 . diff --git a/apps/cli/test/builder/data/Dockerfile.nonriscv b/apps/cli/test/builder/data/Dockerfile.nonriscv new file mode 100644 index 00000000..95ea117f --- /dev/null +++ b/apps/cli/test/builder/data/Dockerfile.nonriscv @@ -0,0 +1,2 @@ +FROM scratch +ADD ./file1 . diff --git a/apps/cli/test/builder/data/data.ext2 b/apps/cli/test/builder/data/data.ext2 new file mode 100644 index 00000000..fd2665b9 Binary files /dev/null and b/apps/cli/test/builder/data/data.ext2 differ diff --git a/apps/cli/test/builder/data/data.sqfs b/apps/cli/test/builder/data/data.sqfs new file mode 100644 index 00000000..809073c1 Binary files /dev/null and b/apps/cli/test/builder/data/data.sqfs differ diff --git a/apps/cli/test/builder/data/data.tar b/apps/cli/test/builder/data/data.tar new file mode 100644 index 00000000..8944d57c Binary files /dev/null and b/apps/cli/test/builder/data/data.tar differ diff --git a/apps/cli/test/builder/data/file1 b/apps/cli/test/builder/data/file1 new file mode 100644 index 00000000..e69de29b diff --git a/apps/cli/test/builder/data/file2 b/apps/cli/test/builder/data/file2 new file mode 100644 index 00000000..e69de29b diff --git a/apps/cli/test/builder/data/sample1/file1 b/apps/cli/test/builder/data/sample1/file1 new file mode 100644 index 00000000..01a59b01 --- /dev/null +++ b/apps/cli/test/builder/data/sample1/file1 @@ -0,0 +1 @@ +lorem ipsum diff --git a/apps/cli/test/builder/data/sample1/file2 b/apps/cli/test/builder/data/sample1/file2 new file mode 100644 index 00000000..6e56bcd2 --- /dev/null +++ b/apps/cli/test/builder/data/sample1/file2 @@ -0,0 +1 @@ +lorem ipsum lorem ipsum diff --git a/apps/cli/test/builder/directory.test.ts b/apps/cli/test/builder/directory.test.ts new file mode 100644 index 00000000..e3ce184e --- /dev/null +++ b/apps/cli/test/builder/directory.test.ts @@ -0,0 +1,97 @@ +import fs from "fs-extra"; +import path from "path"; +import { describe, expect } from "vitest"; +import { build } from "../../src/builder/directory"; +import { DirectoryDriveConfig } from "../../src/config"; +import { tmpdirTest } from "./tmpdirTest"; + +describe("when 
building with the directory builder", () => { + const image = "cartesi/sdk:0.11.0"; + + tmpdirTest( + "should fail when the directory doesn't exists", + async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DirectoryDriveConfig = { + builder: "directory", + directory: path.join(__dirname, "data", "invalid"), + extraSize: 0, + format: "ext2", + }; + await expect( + build("root", drive, image, destination), + ).rejects.toThrow("no such file or directory"); + }, + ); + + tmpdirTest( + "should fail when the directory is empty", + async ({ tmpdir }) => { + const destination = tmpdir; + const directory = path.join(__dirname, "data", "empty"); + fs.ensureDirSync(directory); + const drive: DirectoryDriveConfig = { + builder: "directory", + directory, + extraSize: 0, + format: "ext2", + }; + await expect( + build("root", drive, image, destination), + ).rejects.toThrow("too few blocks"); + }, + ); + + tmpdirTest( + "should pass when the directory is empty but extra size is defined", + async ({ tmpdir }) => { + const destination = tmpdir; + const directory = path.join(__dirname, "data", "empty"); + fs.ensureDirSync(directory); + const drive: DirectoryDriveConfig = { + builder: "directory", + directory, + extraSize: 1024 * 1024, // 1Mb + format: "ext2", + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.ext2"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(1069056); + }, + ); + + tmpdirTest( + "should pass for a populated directory (ext2)", + async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DirectoryDriveConfig = { + builder: "directory", + directory: path.join(__dirname, "data", "sample1"), + extraSize: 0, + format: "ext2", + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.ext2"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(32768); + }, + ); + + tmpdirTest( + "should pass for a populated directory (sqfs)", + async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DirectoryDriveConfig = { + builder: "directory", + directory: path.join(__dirname, "data", "sample1"), + extraSize: 0, + format: "sqfs", + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.sqfs"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(4096); + }, + ); +}); diff --git a/apps/cli/test/builder/docker.test.ts b/apps/cli/test/builder/docker.test.ts new file mode 100644 index 00000000..b6c73bea --- /dev/null +++ b/apps/cli/test/builder/docker.test.ts @@ -0,0 +1,106 @@ +import fs from "fs-extra"; +import { beforeEach } from "node:test"; +import path from "path"; +import { describe, expect } from "vitest"; +import { build } from "../../src/builder/docker"; +import { DockerDriveConfig } from "../../src/config"; +import { tmpdirTest } from "./tmpdirTest"; + +describe("when building with the docker builder", () => { + const image = "cartesi/sdk:0.11.0"; + + beforeEach(({ name }) => { + fs.mkdirpSync(path.join(__dirname, "output", name)); + }); + + tmpdirTest("should fail without correct context", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DockerDriveConfig = { + builder: "docker", + context: ".", + dockerfile: "Dockerfile", + extraSize: 0, + format: "ext2", + tags: [], + image: undefined, + target: undefined, + }; + await expect(build("root", drive, image, destination)).rejects.toThrow( + "exit code 1", + ); + }); + + tmpdirTest("should fail a non-riscv 
image", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DockerDriveConfig = { + builder: "docker", + context: path.join(__dirname, "data"), + dockerfile: path.join(__dirname, "data", "Dockerfile.nonriscv"), + extraSize: 0, + format: "ext2", + tags: [], + image: undefined, + target: undefined, + }; + await expect(build("root", drive, image, destination)).rejects.toThrow( + "Expected riscv64", + ); + }); + + tmpdirTest( + "should build an ext2 drive with a target definition", + async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DockerDriveConfig = { + builder: "docker", + context: path.join(__dirname, "data"), + dockerfile: path.join(__dirname, "data", "Dockerfile"), + extraSize: 0, + format: "ext2", + tags: [], + image: undefined, + target: "test", + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.ext2"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(76087296); + }, + ); + + tmpdirTest("should build an ext2 drive", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DockerDriveConfig = { + builder: "docker", + context: path.join(__dirname, "data"), + dockerfile: path.join(__dirname, "data", "Dockerfile"), + extraSize: 0, + format: "ext2", + tags: [], + image: undefined, + target: undefined, + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.ext2"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(76087296); + }); + + tmpdirTest.skip("should build a sqfs drive", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: DockerDriveConfig = { + builder: "docker", + context: path.join(__dirname, "data"), + dockerfile: path.join(__dirname, "data", "Dockerfile"), + extraSize: 0, + format: "sqfs", + tags: [], + image: undefined, + target: undefined, + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.sqfs"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(29327360); + }); +}); diff --git a/apps/cli/test/builder/empty.test.ts b/apps/cli/test/builder/empty.test.ts new file mode 100644 index 00000000..e5fde98e --- /dev/null +++ b/apps/cli/test/builder/empty.test.ts @@ -0,0 +1,56 @@ +import fs from "fs-extra"; +import path from "path"; +import { describe, expect } from "vitest"; +import { build } from "../../src/builder/empty"; +import { EmptyDriveConfig } from "../../src/config"; +import { tmpdirTest } from "./tmpdirTest"; + +describe("when building with the empty builder", () => { + const image = "cartesi/sdk:0.11.0"; + + tmpdirTest("should fail with an invalid size", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: EmptyDriveConfig = { + builder: "empty", + format: "ext2", + size: 0, + }; + await expect(build("root", drive, image, destination)).rejects.toThrow( + "too few blocks", + ); + }); + + tmpdirTest("should pass and create ext2 drive", async ({ tmpdir }) => { + const destination = tmpdir; + const driveName = "root.ext2"; + const drive: EmptyDriveConfig = { + builder: "empty", + format: "ext2", + size: 1024 * 1024 * 1, // 1Mb + }; + await build("root", drive, image, destination); + + const filename = path.join(destination, driveName); + expect(fs.existsSync(filename)).toBeTruthy(); + const stat = await fs.stat(filename); + expect(stat.isFile()).toBeTruthy(); + expect(stat.size).toEqual(1 * 1024 * 1024); + }); + + tmpdirTest("should pass and create raw drive", async ({ tmpdir }) => { + const 
destination = tmpdir; + const driveName = "root.raw"; + const drive: EmptyDriveConfig = { + builder: "empty", + format: "raw", + size: 1024 * 1024 * 1, // 1Mb + }; + await build("root", drive, image, destination); + + const filename = path.join(destination, driveName); + expect(fs.existsSync(filename)).toBeTruthy(); + const stat = await fs.stat(filename); + expect(stat.isFile()).toBeTruthy(); + expect(stat.size).toEqual(1 * 1024 * 1024); + }); +}); diff --git a/apps/cli/test/builder/none.test.ts b/apps/cli/test/builder/none.test.ts new file mode 100644 index 00000000..8684d3f8 --- /dev/null +++ b/apps/cli/test/builder/none.test.ts @@ -0,0 +1,34 @@ +import fs from "fs-extra"; +import path from "path"; +import { describe, expect } from "vitest"; +import { build } from "../../src/builder/none"; +import { ExistingDriveConfig } from "../../src/config"; +import { tmpdirTest } from "./tmpdirTest"; + +describe("when building with the none builder", () => { + tmpdirTest("should not build a missing file", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: ExistingDriveConfig = { + builder: "none", + filename: path.join(__dirname, "data", "missing.ext2"), + format: "ext2", + }; + await expect(build("root", drive, destination)).rejects.toThrow( + "no such file or directory", + ); + }); + + tmpdirTest("should just copy an existing drive", async ({ tmpdir }) => { + const destination = tmpdir; + const filename = path.join(__dirname, "data", "data.ext2"); + const drive: ExistingDriveConfig = { + builder: "none", + filename, + format: "ext2", + }; + await build("root", drive, destination); + const src = fs.statSync(filename); + const dest = fs.statSync(path.join(destination, "root.ext2")); + expect(dest.size).toEqual(src.size); + }); +}); diff --git a/apps/cli/test/builder/tar.test.ts b/apps/cli/test/builder/tar.test.ts new file mode 100644 index 00000000..0460b35d --- /dev/null +++ b/apps/cli/test/builder/tar.test.ts @@ -0,0 +1,51 @@ +import fs from "fs-extra"; +import path from "path"; +import { describe, expect } from "vitest"; +import { build } from "../../src/builder/tar"; +import { TarDriveConfig } from "../../src/config"; +import { tmpdirTest } from "./tmpdirTest"; + +describe("when building with the tar builder", () => { + const image = "cartesi/sdk:0.11.0"; + + tmpdirTest("should not build a missing file", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: TarDriveConfig = { + builder: "tar", + filename: path.join(__dirname, "data", "unexisting.tar"), + extraSize: 0, + format: "ext2", + }; + await expect(build("root", drive, image, destination)).rejects.toThrow( + "no such file or directory", + ); + }); + + tmpdirTest("should build a ext2 drive", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: TarDriveConfig = { + builder: "tar", + filename: path.join(__dirname, "data", "data.tar"), + extraSize: 0, + format: "ext2", + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.ext2"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(36864); + }); + + tmpdirTest("should build a sqfs drive", async ({ tmpdir }) => { + const destination = tmpdir; + const drive: TarDriveConfig = { + builder: "tar", + filename: path.join(__dirname, "data", "data.tar"), + extraSize: 0, + format: "sqfs", + }; + await build("root", drive, image, destination); + const filename = path.join(destination, "root.sqfs"); + const stat = fs.statSync(filename); + expect(stat.size).toEqual(4096); + }); +}); diff --git 
a/apps/cli/test/builder/tmpdirTest.ts b/apps/cli/test/builder/tmpdirTest.ts new file mode 100644 index 00000000..da4cfa33 --- /dev/null +++ b/apps/cli/test/builder/tmpdirTest.ts @@ -0,0 +1,23 @@ +import fs from "fs-extra"; +import os from "node:os"; +import path from "node:path"; +import { test } from "vitest"; + +interface TmpDirFixture { + tmpdir: string; +} + +const createTempDir = async () => { + const ostmpdir = os.tmpdir(); + const tmpdir = path.join(ostmpdir, "unit-test-"); + return await fs.mkdtemp(tmpdir); +}; + +export const tmpdirTest = test.extend({ + // eslint-disable-next-line no-empty-pattern + tmpdir: async ({}, use) => { + const directory = await createTempDir(); + await use(directory); + await fs.rm(directory, { recursive: true }); + }, +}); diff --git a/apps/cli/test/config.test.ts b/apps/cli/test/config.test.ts new file mode 100644 index 00000000..39207482 --- /dev/null +++ b/apps/cli/test/config.test.ts @@ -0,0 +1,239 @@ +import { describe, expect, it } from "vitest"; +import { + defaultConfig, + defaultMachineConfig, + InvalidBooleanValueError, + InvalidBuilderError, + InvalidBytesValueError, + InvalidDriveFormatError, + InvalidEmptyDriveFormatError, + InvalidNumberValueError, + InvalidStringValueError, + parse, + RequiredFieldError, +} from "../src/config.js"; + +describe("when parsing a cartesi.toml config", () => { + it("should load the default config when file is empty", () => { + const config = parse(""); + expect(config).toEqual(defaultConfig()); + }); + + it("non-standard root drive", () => { + const config = parse(`[drives.root] +builder = "docker" +dockerfile = "backend/Dockerfile" +shared = true`); + + expect(config).toEqual({ + ...defaultConfig(), + drives: { + root: { + builder: "docker", + dockerfile: "backend/Dockerfile", + context: ".", + extraSize: 0, + format: "ext2", + image: undefined, + mount: undefined, + tags: [], + target: undefined, + shared: true, + user: undefined, + }, + }, + }); + }); + + /** + * [machine] + */ + describe("when parsing [machine]", () => { + const config = ` + [machine] + no-rollup = true + `; + it("machine-config", () => { + expect(parse(config)).toEqual({ + ...defaultConfig(), + machine: { + ...defaultMachineConfig(), + noRollup: true, + }, + }); + }); + it("should fail for invalid bootargs", () => { + const invalidConfig = ` + ${config} + bootargs = ["no4lvl", "quiet", false] + `; + expect(() => parse(invalidConfig)).toThrowError( + new InvalidStringValueError(false), + ); + }); + }); + + /** + * [drives] + */ + describe("when parsing [drives]", () => { + it("should fail for invalid configuration", () => { + expect(parse("drives = 42")).toEqual(defaultConfig()); + expect(parse("drives.root = true")).toEqual(defaultConfig()); + expect(parse("drives.root = 42")).toEqual(defaultConfig()); + }); + + it("should fail for invalid builder", () => { + expect(() => + parse('[drives.root]\nbuilder = "invalid"'), + ).toThrowError(new InvalidBuilderError("invalid")); + expect(() => parse("[drives.root]\nbuilder = true")).toThrowError( + new InvalidBuilderError(true), + ); + expect(() => parse("[drives.root]\nbuilder = 10")).toThrowError( + new InvalidBuilderError(10), + ); + expect(() => parse("[drives.root]\nbuilder = {}")).toThrowError( + new InvalidBuilderError({}), + ); + }); + + it("should fail for invalid format", () => { + expect(() => + parse('[drives.root]\nformat = "invalid"'), + ).toThrowError(new InvalidDriveFormatError("invalid")); + expect(() => parse("[drives.root]\nformat = true")).toThrowError( + new 
InvalidDriveFormatError(true), + ); + expect(() => parse("[drives.root]\nformat = 10")).toThrowError( + new InvalidDriveFormatError(10), + ); + expect(() => parse("[drives.root]\nformat = {}")).toThrowError( + new InvalidDriveFormatError({}), + ); + }); + + it("should fail for invalid filename extension", () => { + const builderNone = ` + [drives.none] + builder = "none" + filename = "./games/doom.xyzfs" + mount = "/usr/local/games/doom" + `; + expect(() => parse(builderNone)).toThrowError( + new InvalidDriveFormatError(".xyzfs"), + ); + }); + + it("should fail for invalid mount", () => { + expect(() => parse("[drives.data]\nmount = 42")).toThrowError( + new InvalidStringValueError(42), + ); + }); + + it("should fail for invalid empty drive format", () => { + expect(() => + parse("[drives.data]\nbuilder = 'empty'\nformat = 42"), + ).toThrowError(new InvalidEmptyDriveFormatError(42)); + }); + }); + + /** + * field types + */ + describe("when parsing field types", () => { + it("should fail for invalid boolean value", () => { + expect(() => parse("[machine]\nno-rollup = 42")).toThrowError( + new InvalidBooleanValueError(42), + ); + }); + + it("should fail for invalid number value", () => { + expect(() => parse("[machine]\nmax-mcycle = 'abc'")).toThrowError( + new InvalidNumberValueError("abc"), + ); + }); + + it("should fail for invalid string value", () => { + const invalidTarDrive = ` + [drives.data] + builder = "tar" + filename = 42 # invalid + format = "ext2" + `; + expect(() => parse(invalidTarDrive)).toThrowError( + new InvalidStringValueError(42), + ); + }); + + it("should fail for invalid bytes value", () => { + const invalidTarDrive = ` + [drives.data] + builder = "tar" + extraSize = "abc" + filename = "data.tar" + format = "ext2" + `; + expect(() => parse(invalidTarDrive)).toThrowError( + new InvalidBytesValueError("abc"), + ); + }); + + it("should pass for valid bytes value", () => { + // number + expect(() => + parse( + `[drives.data] + builder = "directory" + directory = "/data" + extra-size = 128 + `, + ), + ).not.toThrow(); + // string + expect(() => + parse( + `[drives.data] + builder = "directory" + directory = "/data" + extra-size = "128MB" + `, + ), + ).not.toThrow(); + // bigint + const bigInt = BigInt(128); + expect(() => + parse( + `[drives.data] + builder = "directory" + directory = "/data" + extra-size = ${bigInt} + `, + ), + ).not.toThrow(); + }); + + it("should fail for invalid boolean value (final-hash)", () => { + expect(() => parse("[machine]\nfinal-hash = 42")).toThrowError( + new InvalidBooleanValueError(42), + ); + }); + + it("should fail for invalid optional boolean value", () => { + expect(() => + parse("[machine]\nassert-rolling-template = 42"), + ).toThrowError(new InvalidBooleanValueError(42)); + }); + + it("should fail when required field is not defined", () => { + const invalidDirectoryDrive = ` + [drives.data] + builder = "directory" + # directory = '' # required + `; + expect(() => parse(invalidDirectoryDrive)).toThrowError( + new RequiredFieldError("directory"), //XXX: how to know which field was required + ); + }); + }); +}); diff --git a/apps/cli/test/configs/default.toml b/apps/cli/test/configs/default.toml new file mode 100644 index 00000000..0f570539 --- /dev/null +++ b/apps/cli/test/configs/default.toml @@ -0,0 +1,2 @@ +# a default configuration is an empty one +# meaning a Cartesi project does not require a cartesi.toml config file diff --git a/apps/cli/test/configs/drives/basic.toml b/apps/cli/test/configs/drives/basic.toml new file mode 100644 index
00000000..13587a50 --- /dev/null +++ b/apps/cli/test/configs/drives/basic.toml @@ -0,0 +1,7 @@ +# this is the basic configuration of a root flash drive built with Docker +# this is also the default configuration for a root flash drive + +[drives.root] +builder = "docker" +dockerfile = "Dockerfile" +format = "ext2" diff --git a/apps/cli/test/configs/drives/data.toml b/apps/cli/test/configs/drives/data.toml new file mode 100644 index 00000000..bc0ac0ec --- /dev/null +++ b/apps/cli/test/configs/drives/data.toml @@ -0,0 +1,7 @@ +# example of a drive with project files + +[drives.data] +builder = "directory" +directory = "./data" # required +extraSize = "100Mb" # optional. size is given by directory content size plus this amount +mount = "/var/lib/app" # optional, default is /mnt/{name} diff --git a/apps/cli/test/configs/drives/empty.toml b/apps/cli/test/configs/drives/empty.toml new file mode 100644 index 00000000..7564982d --- /dev/null +++ b/apps/cli/test/configs/drives/empty.toml @@ -0,0 +1,7 @@ +# example of an empty drive to hold application data + +[drives.data] +builder = "empty" +size = "100Mb" # size can be given as string, or as a number in bytes +mount = "/var/lib/app" # default is /mnt/{name} +# format is always ext2, as sqfs is read-only, and a read-only empty drive does not really make sense diff --git a/apps/cli/test/configs/drives/none.toml b/apps/cli/test/configs/drives/none.toml new file mode 100644 index 00000000..aa4c1d4d --- /dev/null +++ b/apps/cli/test/configs/drives/none.toml @@ -0,0 +1,6 @@ +# example of a drive that is already built and ready to use +# this is useful in case the drive was previously built by another process + +[drives.root] +builder = "none" +filename = "./rootfs-tools-v0.15.0.ext2" diff --git a/apps/cli/test/configs/drives/rives.toml b/apps/cli/test/configs/drives/rives.toml new file mode 100644 index 00000000..83e20898 --- /dev/null +++ b/apps/cli/test/configs/drives/rives.toml @@ -0,0 +1,15 @@ +# example inspired by Rives.
Games are added as sqfs files, which come from an existing external build process +[drives.root] +builder = "docker" +dockerfile = "Dockerfile" +format = "ext2" + +[drives.doom] +builder = "none" +filename = "./games/doom.sqfs" +mount = "/usr/local/games/doom" + +[drives.tetrix] +builder = "none" +filename = "./games/tetrix.sqfs" +mount = "/usr/local/games/tetrix" diff --git a/apps/cli/test/configs/drives/tar.toml b/apps/cli/test/configs/drives/tar.toml new file mode 100644 index 00000000..9cd8b95d --- /dev/null +++ b/apps/cli/test/configs/drives/tar.toml @@ -0,0 +1,6 @@ +# example of a drive built with contents of a tar file +# this is useful if the developer wants to take care of the tar procedure + +[drives.data] +builder = "tar" +filename = "build/files.tar" diff --git a/apps/cli/test/configs/full.toml b/apps/cli/test/configs/full.toml new file mode 100644 index 00000000..3cfd8a0c --- /dev/null +++ b/apps/cli/test/configs/full.toml @@ -0,0 +1,46 @@ +# sdk = "cartesi/sdk:0.6.0" +# runtime = "rollups" +# runtime = "lambada" + +# [machine] +# assert-rolling-update = true +# bootargs = ["no4lvl", "quiet", "earlycon=sbi", "console=hvc0", "rootfstype=ext2", "root=/dev/pmem0", "rw", "init=/usr/sbin/cartesi-init"] +# entrypoint = "/usr/local/bin/app" +# final-hash = true +# max-mcycle = 0 +# no-rollup = false +# ram-image = "/usr/share/cartesi-machine/images/linux.bin" # directory inside SDK image +# ram-length = "128Mi" + +# [drives.root] +# builder = "docker" +# dockerfile = "Dockerfile" +# target = "docker-multi-stage-target" +# format = "ext2" +# format = "sqfs" +# extraSize = "100Mb" # optional. size is given by directory content size plus this amount + +# [drives.data] +# builder = "empty" +# size = "100Mb" # size can be given as string, or as a number in bytes +# mount = "/var/lib/app" # default is /mnt/{name} + +# [drives.data] +# builder = "directory" +# directory = "./data" # required +# extraSize = "100Mb" # optional. size is given by directory content size plus this amount +# format = "ext2" +# format = "sqfs" +# mount = "/var/lib/app" # optional, default is /mnt/{name} + +# [drives.data] +# builder = "tar" +# filename = "build/files.tar" +# extraSize = "100Mb" # optional. 
size is given by directory content size plus this amount +# mount = "/var/lib/app" # optional, default is /mnt/{name} + +# [drives.doom] +# builder = "none" +# filename = "./games/doom.sqfs" +# mount = "/usr/local/games/doom" + diff --git a/apps/cli/test/configs/machine/bootargs.toml b/apps/cli/test/configs/machine/bootargs.toml new file mode 100644 index 00000000..e69de29b diff --git a/apps/cli/test/configs/machine/no_boot.toml b/apps/cli/test/configs/machine/no_boot.toml new file mode 100644 index 00000000..909af54c --- /dev/null +++ b/apps/cli/test/configs/machine/no_boot.toml @@ -0,0 +1,5 @@ +# example of a machine that doesn't run until yield + +[machine] +assert_rolling_update = false +max-mcycle = 0 diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6d9f7685..6afdf6cb 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -80,6 +80,9 @@ importers: semver: specifier: ^7.6.3 version: 7.6.3 + smol-toml: + specifier: ^1.3.0 + version: 1.3.0 tmp: specifier: ^0.2.3 version: 0.2.3 @@ -5495,6 +5498,10 @@ packages: resolution: {integrity: sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==} engines: {node: '>=12'} + smol-toml@1.3.0: + resolution: {integrity: sha512-tWpi2TsODPScmi48b/OQZGi2lgUmBCHy6SZrhi/FdnnHiU1GwebbCfuQuxsC3nHaLwtYeJGPrDZDIeodDOc4pA==} + engines: {node: '>= 18'} + snake-case@3.0.4: resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} @@ -12930,6 +12937,8 @@ snapshots: slash@4.0.0: {} + smol-toml@1.3.0: {} + snake-case@3.0.4: dependencies: dot-case: 3.0.4
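For reference, below is a minimal sketch of how the drive builders introduced in this PR could be wired to a parsed cartesi.toml. It assumes only the signatures visible in the diff (parse/Config exported from src/config.js, and the per-builder build functions); the buildDrives helper, the import paths, and the sdkImage argument are illustrative and not the CLI's actual build command, and it assumes Config.drives is a record of the per-builder drive configs discriminated by the builder field.

import fs from "fs-extra";
import { Config, parse } from "./config.js";
import * as directory from "./builder/directory.js";
import * as docker from "./builder/docker.js";
import * as empty from "./builder/empty.js";
import * as none from "./builder/none.js";
import * as tar from "./builder/tar.js";

// hypothetical helper: read cartesi.toml (falling back to the default config
// when the file is missing) and build every configured drive into destination
export const buildDrives = async (
    configPath: string, // e.g. "cartesi.toml"
    sdkImage: string, // e.g. "cartesi/sdk:0.11.0", as used by the tests
    destination: string, // output directory for the <name>.<format> images
): Promise<void> => {
    const config: Config = fs.existsSync(configPath)
        ? parse(fs.readFileSync(configPath, "utf8"))
        : parse(""); // an empty config yields the default configuration

    for (const [name, drive] of Object.entries(config.drives)) {
        switch (drive.builder) {
            case "directory":
                await directory.build(name, drive, sdkImage, destination);
                break;
            case "docker":
                await docker.build(name, drive, sdkImage, destination);
                break;
            case "empty":
                await empty.build(name, drive, sdkImage, destination);
                break;
            case "tar":
                await tar.build(name, drive, sdkImage, destination);
                break;
            case "none":
                // the "none" builder only copies an existing image, so no SDK image is needed
                await none.build(name, drive, destination);
                break;
        }
    }
};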