From 9ab7a3400e2265c920df5b1187d3edfd2d0ac9cc Mon Sep 17 00:00:00 2001
From: David Worms
Date: Sun, 19 Nov 2023 10:51:59 +0100
Subject: [PATCH] feat: convert to esm

feat: convert to esm
---
 extra/lxd-runner/lib/actions/delete.js | 5 +-
 extra/lxd-runner/lib/actions/enter.js | 5 +-
 extra/lxd-runner/lib/actions/exec.js | 5 +-
 extra/lxd-runner/lib/actions/info.js | 9 -
 extra/lxd-runner/lib/actions/run.js | 9 +-
 extra/lxd-runner/lib/actions/start.js | 5 +-
 extra/lxd-runner/lib/actions/state.js | 9 +-
 extra/lxd-runner/lib/actions/stop.js | 5 +-
 extra/lxd-runner/lib/actions/test.js | 5 +-
 extra/lxd-runner/lib/index.js | 289 ++++---
 extra/lxd-runner/package.json | 10 +-
 extra/lxd-runner/src/actions/delete.coffee | 6 -
 extra/lxd-runner/src/actions/enter.coffee | 18 -
 extra/lxd-runner/src/actions/exec.coffee | 18 -
 extra/lxd-runner/src/actions/run.coffee | 5 -
 extra/lxd-runner/src/actions/start.coffee | 5 -
 extra/lxd-runner/src/actions/state.coffee | 7 -
 extra/lxd-runner/src/actions/stop.coffee | 5 -
 extra/lxd-runner/src/actions/test.coffee | 18 -
 extra/lxd-runner/src/index.coffee | 127 ---
 packages/core/README.md | 4 +-
 packages/core/env/arch_chroot/Dockerfile | 37 +-
 .../core/env/arch_chroot/docker-compose.yml | 17 +-
 packages/core/env/arch_chroot/test.coffee | 2 +-
 packages/core/env/centos7/Dockerfile | 29 +-
 packages/core/env/centos7/docker-compose.yml | 13 +-
 packages/core/env/centos7/test.coffee | 2 +-
 packages/core/env/chown/index.coffee | 48 --
 packages/core/env/chown/index.js | 58 ++
 packages/core/env/chown/test.coffee | 2 +-
 packages/core/env/run.sh | 4 +-
 packages/core/env/ssh/index.coffee | 76 --
 packages/core/env/ssh/index.js | 88 ++
 packages/core/env/ssh/test.coffee | 2 +-
 packages/core/env/sudo/Dockerfile | 2 +-
 packages/core/env/sudo/docker-compose.yml | 13 +-
 packages/core/env/sudo/test.coffee | 2 +-
 packages/core/env/ubuntu-14.04/Dockerfile | 33 +-
 .../core/env/ubuntu-14.04/docker-compose.yml | 13 +-
 packages/core/env/ubuntu-14.04/test.coffee | 2 +-
 packages/core/env/ubuntu-22.04/Dockerfile | 28 +-
 .../core/env/ubuntu-22.04/docker-compose.yml | 13 +-
 packages/core/env/ubuntu-22.04/test.coffee | 2 +-
 packages/core/lib/actions/assert/index.js | 6 +-
 packages/core/lib/actions/call/README.md | 2 +-
 packages/core/lib/actions/call/index.js | 8 +-
 .../core/lib/actions/execute/assert/index.js | 8 +-
 .../lib/actions/execute/assert/schema.json | 2 +-
 packages/core/lib/actions/execute/index.js | 66 +-
 .../core/lib/actions/execute/wait/index.js | 6 +-
 .../core/lib/actions/execute/wait/schema.json | 8 +-
 packages/core/lib/actions/fs/assert/index.js | 15 +-
 .../core/lib/actions/fs/assert/schema.json | 4 +-
 .../core/lib/actions/fs/base/chmod/index.js | 4 +-
 .../core/lib/actions/fs/base/chown/index.js | 4 +-
 .../core/lib/actions/fs/base/copy/index.js | 8 +-
 .../actions/fs/base/createReadStream/index.js | 16 +-
 .../fs/base/createWriteStream/index.js | 50 +-
 .../fs/base/createWriteStream/schema.json | 2 +-
 .../core/lib/actions/fs/base/exists/index.js | 4 +-
 .../core/lib/actions/fs/base/lstat/index.js | 4 +-
 .../core/lib/actions/fs/base/mkdir/index.js | 6 +-
 .../lib/actions/fs/base/mkdir/schema.json | 6 +-
 .../lib/actions/fs/base/readFile/index.js | 6 +-
 .../lib/actions/fs/base/readFile/schema.json | 2 +-
 .../core/lib/actions/fs/base/readdir/index.js | 10 +-
 .../lib/actions/fs/base/readlink/index.js | 4 +-
 .../core/lib/actions/fs/base/rename/index.js | 4 +-
 .../core/lib/actions/fs/base/rmdir/index.js | 11 +-
 .../core/lib/actions/fs/base/stat/README.md | 14 +-
.../core/lib/actions/fs/base/stat/index.js | 68 +- .../lib/actions/fs/base/stat/schema.in.json | 3 +- .../lib/actions/fs/base/stat/schema.out.json | 2 +- .../core/lib/actions/fs/base/symlink/index.js | 10 +- .../core/lib/actions/fs/base/unlink/index.js | 8 +- .../lib/actions/fs/base/writeFile/index.js | 6 +- .../lib/actions/fs/base/writeFile/schema.json | 2 +- packages/core/lib/actions/fs/chmod/index.js | 6 +- .../core/lib/actions/fs/chmod/schema.json | 2 +- packages/core/lib/actions/fs/chown/index.js | 7 +- .../core/lib/actions/fs/chown/schema.json | 4 +- packages/core/lib/actions/fs/copy/index.js | 6 +- packages/core/lib/actions/fs/copy/schema.json | 16 +- packages/core/lib/actions/fs/glob/index.js | 10 +- packages/core/lib/actions/fs/hash/index.js | 144 ++-- packages/core/lib/actions/fs/link/index.js | 8 +- packages/core/lib/actions/fs/link/schema.json | 2 +- packages/core/lib/actions/fs/mkdir/index.js | 6 +- .../core/lib/actions/fs/mkdir/schema.json | 12 +- packages/core/lib/actions/fs/move/index.js | 4 +- packages/core/lib/actions/fs/remove/index.js | 6 +- packages/core/lib/actions/fs/wait/index.js | 6 +- packages/core/lib/actions/ssh/close/index.js | 8 +- packages/core/lib/actions/ssh/open/README.md | 2 +- packages/core/lib/actions/ssh/open/index.js | 63 +- .../core/lib/actions/ssh/open/schema.json | 2 +- packages/core/lib/actions/ssh/root/index.js | 133 ++- packages/core/lib/actions/wait/index.js | 4 +- packages/core/lib/index.js | 116 ++- packages/core/lib/plugins/args.js | 23 +- .../{assertions/index.js => assertions.js} | 12 +- .../core/lib/plugins/assertions/exists.js | 18 +- .../{conditions/index.js => conditions.js} | 16 +- .../core/lib/plugins/conditions/execute.js | 16 +- .../core/lib/plugins/conditions/exists.js | 12 +- packages/core/lib/plugins/conditions/os.js | 45 +- packages/core/lib/plugins/global.js | 23 +- packages/core/lib/plugins/history.js | 17 +- packages/core/lib/plugins/magic_dollar.js | 28 +- .../plugins/metadata/argument_to_config.js | 10 +- packages/core/lib/plugins/metadata/audit.js | 193 +++++ packages/core/lib/plugins/metadata/debug.js | 25 +- .../core/lib/plugins/metadata/disabled.js | 28 +- packages/core/lib/plugins/metadata/execute.js | 59 +- packages/core/lib/plugins/metadata/header.js | 11 +- .../core/lib/plugins/metadata/position.js | 22 +- packages/core/lib/plugins/metadata/raw.js | 48 +- packages/core/lib/plugins/metadata/relax.js | 33 +- packages/core/lib/plugins/metadata/retry.js | 10 +- packages/core/lib/plugins/metadata/schema.js | 18 +- packages/core/lib/plugins/metadata/time.js | 25 +- packages/core/lib/plugins/metadata/tmpdir.js | 155 ++-- packages/core/lib/plugins/metadata/uuid.js | 11 +- packages/core/lib/plugins/output/logs.js | 22 +- packages/core/lib/plugins/output/status.js | 16 +- .../core/lib/plugins/pubsub/engines/memory.js | 2 +- packages/core/lib/plugins/pubsub/index.js | 8 +- packages/core/lib/plugins/ssh.js | 69 +- packages/core/lib/plugins/templated.js | 29 +- packages/core/lib/plugins/tools/dig.js | 10 +- packages/core/lib/plugins/tools/events.js | 47 +- packages/core/lib/plugins/tools/find.js | 8 +- packages/core/lib/plugins/tools/log.js | 21 +- packages/core/lib/plugins/tools/path.js | 37 +- packages/core/lib/plugins/tools/schema.js | 30 +- packages/core/lib/plugins/tools/walk.js | 11 +- packages/core/lib/register.js | 91 +- packages/core/lib/registry.js | 16 +- packages/core/lib/schedulers/index.js | 6 +- packages/core/lib/session.js | 38 +- packages/core/lib/session/contextualize.js | 78 +- 
packages/core/lib/session/normalize.js | 11 +- packages/core/lib/utils/array.js | 240 +++--- packages/core/lib/utils/buffer.js | 18 +- packages/core/lib/utils/error.js | 50 +- packages/core/lib/utils/index.js | 63 +- packages/core/lib/utils/mode.js | 61 +- packages/core/lib/utils/object.js | 328 ++++---- packages/core/lib/utils/os.js | 175 ++-- packages/core/lib/utils/promise.js | 40 +- packages/core/lib/utils/regexp.js | 28 +- packages/core/lib/utils/schema.js | 25 +- packages/core/lib/utils/semver.js | 69 +- packages/core/lib/utils/ssh.js | 72 +- packages/core/lib/utils/stats.js | 113 ++- packages/core/lib/utils/string.js | 224 ++--- packages/core/lib/utils/tilde.js | 36 +- packages/core/package.json | 26 +- packages/core/test.sample.coffee | 6 +- packages/core/test/actions/assert.coffee | 8 +- packages/core/test/actions/call.coffee | 11 +- .../core/test/actions/execute/assert.coffee | 9 +- .../actions/execute/config.arch_linux.coffee | 13 +- .../test/actions/execute/config.bash.coffee | 9 +- .../test/actions/execute/config.code.coffee | 9 +- .../test/actions/execute/config.cwd.coffee | 9 +- .../test/actions/execute/config.env.coffee | 11 +- .../actions/execute/config.env_export.coffee | 11 +- .../test/actions/execute/config.format.coffee | 32 +- .../test/actions/execute/config.stdio.coffee | 9 +- .../test/actions/execute/config.sudo.coffee | 9 +- .../core/test/actions/execute/index.coffee | 130 +-- .../core/test/actions/execute/wait.coffee | 9 +- packages/core/test/actions/fs/assert.coffee | 12 +- .../core/test/actions/fs/base/chmod.coffee | 13 +- .../core/test/actions/fs/base/chown.coffee | 13 +- .../core/test/actions/fs/base/copy.coffee | 10 +- .../actions/fs/base/createReadStream.coffee | 10 +- .../fs/base/createReadStream.sudo.coffee | 12 +- .../actions/fs/base/createWriteStream.coffee | 12 +- .../fs/base/createWriteStream.sudo.coffee | 80 +- .../core/test/actions/fs/base/exists.coffee | 10 +- .../core/test/actions/fs/base/lstat.coffee | 12 +- .../core/test/actions/fs/base/mkdir.coffee | 12 +- .../core/test/actions/fs/base/readFile.coffee | 12 +- .../core/test/actions/fs/base/readdir.coffee | 14 +- .../core/test/actions/fs/base/readlink.coffee | 10 +- .../core/test/actions/fs/base/rename.coffee | 11 +- .../core/test/actions/fs/base/rmdir.coffee | 12 +- .../core/test/actions/fs/base/stat.coffee | 12 +- .../core/test/actions/fs/base/symlink.coffee | 12 +- .../core/test/actions/fs/base/unlink.coffee | 12 +- .../test/actions/fs/base/writeFile.coffee | 10 +- .../actions/fs/base/writeFile.sudo.coffee | 10 +- packages/core/test/actions/fs/chmod.coffee | 11 +- packages/core/test/actions/fs/chown.coffee | 11 +- packages/core/test/actions/fs/copy.coffee | 10 +- packages/core/test/actions/fs/glob.coffee | 15 +- packages/core/test/actions/fs/hash.coffee | 11 +- packages/core/test/actions/fs/link.coffee | 44 +- packages/core/test/actions/fs/mkdir.coffee | 15 +- packages/core/test/actions/fs/move.coffee | 10 +- packages/core/test/actions/fs/remove.coffee | 10 +- packages/core/test/actions/fs/wait.coffee | 10 +- packages/core/test/actions/ssh/close.coffee | 10 +- packages/core/test/actions/ssh/open.coffee | 15 +- packages/core/test/actions/ssh/root.coffee | 13 +- packages/core/test/actions/wait.coffee | 9 +- packages/core/test/loaders/all.js | 16 + packages/core/test/loaders/coffee.js | 20 + packages/core/test/plugins/args.coffee | 6 +- .../test/plugins/assertions/assert.coffee | 6 +- .../plugins/assertions/assert_exists.coffee | 9 +- .../test/plugins/assertions/unassert.coffee | 6 +- 
.../plugins/assertions/unassert_exists.coffee | 9 +- .../core/test/plugins/conditions/if.coffee | 6 +- .../test/plugins/conditions/if_execute.coffee | 9 +- .../test/plugins/conditions/if_exists.coffee | 9 +- .../core/test/plugins/conditions/if_os.coffee | 47 +- .../core/test/plugins/conditions/index.coffee | 6 +- .../test/plugins/conditions/unless.coffee | 6 +- .../plugins/conditions/unless_execute.coffee | 9 +- .../plugins/conditions/unless_exists.coffee | 9 +- .../test/plugins/conditions/unless_os.coffee | 15 +- .../core/test/plugins/execute.sudo.coffee | 9 +- packages/core/test/plugins/global.coffee | 6 +- packages/core/test/plugins/history.coffee | 6 +- .../core/test/plugins/magic_dollar.coffee | 6 +- .../test/plugins/metadata/argument.coffee | 6 +- .../metadata/argument_to_config.coffee | 6 +- .../core/test/plugins/metadata/attempt.coffee | 6 +- .../core/test/plugins/metadata/debug.coffee | 8 +- .../test/plugins/metadata/definitions.coffee | 6 +- .../core/test/plugins/metadata/depth.coffee | 12 +- .../test/plugins/metadata/disabled.coffee | 6 +- .../core/test/plugins/metadata/execute.coffee | 9 +- .../core/test/plugins/metadata/header.coffee | 6 +- .../core/test/plugins/metadata/index.coffee | 8 +- .../test/plugins/metadata/position.coffee | 8 +- .../core/test/plugins/metadata/raw.coffee | 6 +- .../test/plugins/metadata/raw_input.coffee | 6 +- .../test/plugins/metadata/raw_output.coffee | 6 +- .../core/test/plugins/metadata/relax.coffee | 8 +- .../core/test/plugins/metadata/retry.coffee | 6 +- .../core/test/plugins/metadata/schema.coffee | 6 +- .../core/test/plugins/metadata/shy.coffee | 6 +- .../core/test/plugins/metadata/sleep.coffee | 6 +- .../core/test/plugins/metadata/tmpdir.coffee | 15 +- .../test/plugins/metadata/tmpdir.sudo.coffee | 15 +- .../core/test/plugins/metadata/uuid.coffee | 6 +- packages/core/test/plugins/output/logs.coffee | 8 +- .../core/test/plugins/output/status.coffee | 6 +- packages/core/test/plugins/pubsub.coffee | 11 +- packages/core/test/plugins/ssh.coffee | 17 +- packages/core/test/plugins/templated.coffee | 6 +- packages/core/test/plugins/time.coffee | 6 +- packages/core/test/plugins/tools/dig.coffee | 6 +- .../core/test/plugins/tools/events.coffee | 6 +- packages/core/test/plugins/tools/find.coffee | 6 +- packages/core/test/plugins/tools/log.coffee | 14 +- packages/core/test/plugins/tools/path.coffee | 11 +- .../test/plugins/tools/schema.boolean.coffee | 6 +- .../core/test/plugins/tools/schema.coffee | 6 +- .../plugins/tools/schema.instanceof.coffee | 8 +- .../core/test/plugins/tools/schema.ref.coffee | 16 +- .../core/test/plugins/tools/status.coffee | 6 +- packages/core/test/plugins/tools/walk.coffee | 6 +- packages/core/test/registry/create.coffee | 6 +- packages/core/test/registry/deprecate.coffee | 6 +- packages/core/test/registry/get.coffee | 8 +- packages/core/test/registry/index.coffee | 8 +- packages/core/test/registry/register.coffee | 20 +- packages/core/test/registry/registered.coffee | 8 +- packages/core/test/registry/unregister.coffee | 6 +- .../core/test/session/action/config.coffee | 6 +- .../core/test/session/action/handler.coffee | 6 +- .../core/test/session/contextualize.coffee | 7 +- packages/core/test/session/creation.coffee | 8 +- packages/core/test/session/error.coffee | 8 +- packages/core/test/session/namespace.coffee | 8 +- packages/core/test/session/normalize.coffee | 8 +- .../test/session/plugins/on_action.coffee | 6 +- .../test/session/plugins/on_normalize.coffee | 6 +- .../session/plugins/session.action.coffee | 8 +- 
.../session/plugins/session.register.coffee | 8 +- .../session/plugins/session.resolved.coffee | 8 +- .../session/plugins/session.result.coffee | 12 +- packages/core/test/session/registry.coffee | 8 +- .../core/test/session/scheduler/error.coffee | 6 +- .../core/test/session/scheduler/flow.coffee | 6 +- .../core/test/session/scheduler/index.coffee | 6 +- .../session/scheduler/option.strict.coffee | 6 +- packages/core/test/test.coffee | 26 +- packages/core/test/utils/array.coffee | 6 +- packages/core/test/utils/buffer.coffee | 6 +- packages/core/test/utils/error.coffee | 6 +- packages/core/test/utils/mode.coffee | 6 +- packages/core/test/utils/object.coffee | 6 +- packages/core/test/utils/os.coffee | 10 +- packages/core/test/utils/promise.coffee | 6 +- packages/core/test/utils/regexp.coffee | 6 +- packages/core/test/utils/semver.coffee | 6 +- packages/core/test/utils/ssh.coffee | 13 +- packages/core/test/utils/stats.coffee | 18 +- packages/core/test/utils/string.coffee | 6 +- packages/core/test/utils/tilde.coffee | 6 +- packages/db/README.md | 17 + packages/db/env/mariadb/Dockerfile | 60 +- packages/db/env/mariadb/docker-compose.yml | 27 +- packages/db/env/mariadb/entrypoint.sh | 15 +- packages/db/env/mariadb/run.sh | 4 +- packages/db/env/mariadb/test.coffee | 3 +- packages/db/env/mysql/Dockerfile | 59 +- packages/db/env/mysql/docker-compose.yml | 29 +- packages/db/env/mysql/entrypoint.sh | 20 +- packages/db/env/mysql/run.sh | 4 +- packages/db/env/mysql/test.coffee | 3 +- packages/db/env/postgresql/Dockerfile | 59 +- packages/db/env/postgresql/docker-compose.yml | 29 +- packages/db/env/postgresql/entrypoint.sh | 19 +- packages/db/env/postgresql/run.sh | 4 +- packages/db/env/postgresql/test.coffee | 3 +- packages/db/lib/database/exists/README.md | 2 +- packages/db/lib/database/exists/index.js | 4 +- packages/db/lib/database/exists/schema.json | 2 +- packages/db/lib/database/index.js | 8 +- packages/db/lib/database/remove/index.js | 4 +- packages/db/lib/database/remove/schema.json | 2 +- packages/db/lib/database/schema.json | 2 +- packages/db/lib/database/wait/index.js | 4 +- packages/db/lib/database/wait/schema.json | 2 +- packages/db/lib/query/index.js | 6 +- packages/db/lib/query/schema.json | 2 +- packages/db/lib/register.js | 37 +- packages/db/lib/schema/exists/index.js | 4 +- packages/db/lib/schema/exists/schema.json | 2 +- packages/db/lib/schema/index.js | 6 +- packages/db/lib/schema/list/index.js | 6 +- packages/db/lib/schema/list/schema.json | 2 +- packages/db/lib/schema/remove/index.js | 4 +- packages/db/lib/schema/remove/schema.json | 2 +- packages/db/lib/schema/schema.json | 2 +- packages/db/lib/user/exists/index.js | 6 +- packages/db/lib/user/exists/schema.json | 2 +- packages/db/lib/user/index.js | 41 +- packages/db/lib/user/remove/index.js | 4 +- packages/db/lib/user/remove/schema.json | 2 +- packages/db/lib/user/schema.json | 2 +- packages/db/lib/utils/db.js | 167 ++++ packages/db/lib/utils/index.js | 152 +--- packages/db/package.json | 27 +- packages/db/test.sample.coffee | 4 +- packages/db/test/database/exists.coffee | 22 +- packages/db/test/database/index.coffee | 66 +- packages/db/test/database/wait.coffee | 28 +- packages/db/test/query.coffee | 202 +++-- packages/db/test/schema/exists.coffee | 21 +- packages/db/test/schema/index.coffee | 59 +- packages/db/test/schema/list.coffee | 33 +- packages/db/test/schema/remove.coffee | 31 +- packages/db/test/test.coffee | 26 +- packages/db/test/user/exists.coffee | 24 +- packages/db/test/user/index.coffee | 51 +- 
packages/db/test/utils/command.coffee | 112 +++ packages/db/test/utils/escape.coffee | 16 + packages/db/test/utils/index.coffee | 142 ---- packages/db/test/utils/jdbc.coffee | 17 + packages/docker/README.md | 13 + packages/docker/env/docker/Dockerfile | 50 +- packages/docker/env/docker/docker-compose.yml | 14 +- packages/docker/env/docker/entrypoint.sh | 18 +- packages/docker/env/docker/run.sh | 4 +- packages/docker/env/docker/test.coffee | 7 +- packages/docker/lib/build/index.js | 117 +-- packages/docker/lib/build/schema.json | 2 +- packages/docker/lib/compose/index.js | 8 +- packages/docker/lib/cp/index.js | 11 +- packages/docker/lib/cp/schema.json | 2 +- packages/docker/lib/exec/index.js | 4 +- packages/docker/lib/exec/schema.json | 8 +- packages/docker/lib/images/README.md | 35 + packages/docker/lib/images/index.js | 43 + packages/docker/lib/images/schema.json | 33 + packages/docker/lib/inspect/index.js | 4 +- packages/docker/lib/inspect/schema.json | 2 +- packages/docker/lib/kill/index.js | 4 +- packages/docker/lib/kill/schema.json | 2 +- packages/docker/lib/load/index.js | 132 ++- packages/docker/lib/load/schema.json | 2 +- packages/docker/lib/login/index.js | 7 +- packages/docker/lib/login/schema.json | 2 +- packages/docker/lib/logout/index.js | 7 +- packages/docker/lib/logout/schema.json | 2 +- packages/docker/lib/pause/index.js | 4 +- packages/docker/lib/pause/schema.json | 2 +- packages/docker/lib/pull/index.js | 4 +- packages/docker/lib/pull/schema.json | 2 +- packages/docker/lib/register.js | 66 +- packages/docker/lib/restart/index.js | 4 +- packages/docker/lib/restart/schema.json | 2 +- packages/docker/lib/rm/index.js | 4 +- packages/docker/lib/rm/schema.json | 2 +- packages/docker/lib/rmi/index.js | 4 +- packages/docker/lib/rmi/schema.json | 2 +- packages/docker/lib/run/index.js | 6 +- packages/docker/lib/run/schema.json | 2 +- packages/docker/lib/save/index.js | 4 +- packages/docker/lib/save/schema.json | 2 +- packages/docker/lib/start/index.js | 6 +- packages/docker/lib/start/schema.json | 2 +- packages/docker/lib/stop/index.js | 6 +- packages/docker/lib/stop/schema.json | 2 +- packages/docker/lib/tools/checksum/index.js | 17 +- .../docker/lib/tools/checksum/schema.json | 2 +- packages/docker/lib/tools/execute/index.js | 10 +- packages/docker/lib/tools/execute/schema.json | 4 +- packages/docker/lib/tools/service/index.js | 4 +- packages/docker/lib/tools/service/schema.json | 2 +- packages/docker/lib/tools/status/index.js | 4 +- packages/docker/lib/tools/status/schema.json | 2 +- packages/docker/lib/unpause/index.js | 4 +- packages/docker/lib/unpause/schema.json | 2 +- packages/docker/lib/utils/docker.js | 132 +++ packages/docker/lib/utils/index.js | 107 +-- packages/docker/lib/volume_create/index.js | 4 +- packages/docker/lib/volume_create/schema.json | 2 +- packages/docker/lib/volume_rm/index.js | 4 +- packages/docker/lib/volume_rm/schema.json | 2 +- packages/docker/lib/wait/index.js | 4 +- packages/docker/lib/wait/schema.json | 2 +- packages/docker/package.json | 30 +- packages/docker/test.sample.coffee | 4 +- packages/docker/test/build.coffee | 65 +- packages/docker/test/compose/index.coffee | 20 +- packages/docker/test/cp.coffee | 78 +- packages/docker/test/exec.coffee | 34 +- packages/docker/test/images.coffee | 80 ++ packages/docker/test/inspect.coffee | 26 +- packages/docker/test/kill.coffee | 32 +- packages/docker/test/load.coffee | 20 +- packages/docker/test/pull.coffee | 28 +- packages/docker/test/rm.coffee | 28 +- packages/docker/test/rmi.coffee | 20 +- 
packages/docker/test/run.coffee | 53 +- packages/docker/test/save.coffee | 20 +- packages/docker/test/start.coffee | 32 +- packages/docker/test/stop.coffee | 24 +- packages/docker/test/test.coffee | 53 +- packages/docker/test/tools/checksum.coffee | 22 +- packages/docker/test/tools/execute.coffee | 14 +- packages/docker/test/tools/service.coffee | 30 +- packages/docker/test/tools/status.coffee | 26 +- packages/docker/test/volume_create.coffee | 20 +- packages/docker/test/volume_rm.coffee | 20 +- packages/docker/test/wait.coffee | 16 +- packages/file/README.md | 17 + packages/file/lib/cache/index.js | 22 +- packages/file/lib/cson/index.js | 8 +- packages/file/lib/cson/schema.json | 12 +- packages/file/lib/download/README.md | 4 +- packages/file/lib/download/index.js | 211 ++--- packages/file/lib/download/schema.json | 6 +- packages/file/lib/index.js | 46 +- packages/file/lib/ini/README.md | 2 +- packages/file/lib/ini/index.js | 8 +- packages/file/lib/ini/read/index.js | 8 +- packages/file/lib/ini/read/schema.json | 2 +- packages/file/lib/ini/schema.json | 14 +- packages/file/lib/json/index.js | 6 +- packages/file/lib/properties/index.js | 6 +- packages/file/lib/properties/read/index.js | 6 +- packages/file/lib/properties/read/schema.json | 2 +- packages/file/lib/properties/schema.json | 12 +- packages/file/lib/register.js | 61 +- packages/file/lib/render/index.js | 8 +- packages/file/lib/render/schema.json | 20 +- packages/file/lib/schema.json | 8 +- packages/file/lib/touch/index.js | 4 +- packages/file/lib/touch/schema.json | 6 +- packages/file/lib/types/ceph_conf/index.js | 8 +- packages/file/lib/types/hfile/index.js | 6 +- packages/file/lib/types/hfile/schema.json | 18 +- packages/file/lib/types/krb5_conf/index.js | 6 +- packages/file/lib/types/locale_gen/index.js | 6 +- packages/file/lib/types/my_cnf/index.js | 6 +- packages/file/lib/types/pacman_conf/index.js | 8 +- .../lib/types/ssh_authorized_keys/index.js | 8 +- .../lib/types/ssh_authorized_keys/schema.json | 2 +- .../file/lib/types/systemd/resolved/index.js | 8 +- .../file/lib/types/systemd/timesyncd/index.js | 8 +- .../file/lib/types/wireguard_conf/index.js | 8 +- packages/file/lib/types/yum_repo/index.js | 32 +- packages/file/lib/types/yum_repo/schema.json | 2 +- packages/file/lib/upload/index.js | 12 +- packages/file/lib/upload/schema.json | 6 +- packages/file/lib/utils/diff.js | 15 +- packages/file/lib/utils/hfile.js | 22 +- packages/file/lib/utils/index.js | 14 +- packages/file/lib/utils/ini.js | 775 +++++++++-------- packages/file/lib/utils/partial.js | 6 +- packages/file/lib/yaml/index.js | 10 +- packages/file/lib/yaml/schema.json | 6 +- packages/file/package.json | 28 +- packages/file/test.sample.coffee | 4 +- packages/file/test/cache.file.coffee | 25 +- packages/file/test/cache.http.coffee | 18 +- packages/file/test/cson.coffee | 10 +- packages/file/test/download.zip | Bin 1506 -> 0 bytes packages/file/test/download_file.coffee | 152 ++-- packages/file/test/download_url.coffee | 54 +- packages/file/test/index.coffee | 396 ++++----- packages/file/test/index.diff.coffee | 10 +- packages/file/test/ini/index.coffee | 10 +- ...index.stringify_brackets_then_curly.coffee | 12 +- .../ini/index.stringify_single_key.coffee | 12 +- packages/file/test/ini/read.coffee | 10 +- packages/file/test/json.coffee | 10 +- packages/file/test/properties/index.coffee | 10 +- packages/file/test/properties/read.coffee | 10 +- packages/file/test/render.coffee | 10 +- packages/file/test/test.coffee | 26 +- packages/file/test/touch.coffee | 11 +- 
packages/file/test/types/ceph_conf.coffee | 10 +- packages/file/test/types/hfile.coffee | 10 +- packages/file/test/types/krb5_conf.coffee | 10 +- packages/file/test/types/locale_gen.coffee | 10 +- packages/file/test/types/my_cnf.coffee | 10 +- packages/file/test/types/pacman_conf.coffee | 10 +- .../test/types/ssh_authorized_keys.coffee | 10 +- .../file/test/types/systemd/resolved.coffee | 10 +- .../file/test/types/systemd/timesyncd.coffee | 10 +- packages/file/test/types/test.coffee | 14 - .../file/test/types/wireguard_conf.coffee | 10 +- packages/file/test/types/yum_repo.coffee | 36 +- packages/file/test/upload.coffee | 80 +- packages/file/test/utils/ini/parse.coffee | 7 +- .../ini/parse_brackets_then_curly.coffee | 7 +- .../utils/ini/parse_multi_brackets.coffee | 7 +- .../parse_multi_brackets_multi_lines.coffee | 7 +- .../file/test/utils/ini/split_by_dots.coffee | 7 +- packages/file/test/utils/ini/stringify.coffee | 7 +- .../ini/stringify_brackets_then_curly.coffee | 7 +- .../utils/ini/stringify_multi_brackets.coffee | 7 +- packages/file/test/yaml.coffee | 10 +- packages/ipa/README.md | 41 +- packages/ipa/env/ipa/index.coffee | 145 ---- packages/ipa/env/ipa/index.js | 174 ++++ packages/ipa/env/ipa/test.coffee | 2 +- packages/ipa/env/run.sh | 2 +- packages/ipa/lib/group/add_member/index.js | 4 +- packages/ipa/lib/group/add_member/schema.json | 2 +- packages/ipa/lib/group/del/index.js | 4 +- packages/ipa/lib/group/del/schema.json | 2 +- packages/ipa/lib/group/exists/index.js | 4 +- packages/ipa/lib/group/exists/schema.json | 2 +- packages/ipa/lib/group/index.js | 4 +- packages/ipa/lib/group/schema.json | 2 +- packages/ipa/lib/group/show/index.js | 4 +- packages/ipa/lib/group/show/schema.json | 2 +- packages/ipa/lib/register.js | 49 +- packages/ipa/lib/service/del/index.js | 4 +- packages/ipa/lib/service/del/schema.json | 2 +- packages/ipa/lib/service/exists/index.js | 4 +- packages/ipa/lib/service/exists/schema.json | 2 +- packages/ipa/lib/service/index.js | 58 +- packages/ipa/lib/service/schema.json | 2 +- packages/ipa/lib/service/show/index.js | 4 +- packages/ipa/lib/service/show/schema.json | 2 +- packages/ipa/lib/user/del/index.js | 4 +- packages/ipa/lib/user/del/schema.json | 2 +- packages/ipa/lib/user/disable/index.js | 4 +- packages/ipa/lib/user/disable/schema.json | 2 +- packages/ipa/lib/user/enable/index.js | 4 +- packages/ipa/lib/user/enable/schema.json | 2 +- packages/ipa/lib/user/exists/index.js | 4 +- packages/ipa/lib/user/exists/schema.json | 2 +- packages/ipa/lib/user/find/index.js | 4 +- packages/ipa/lib/user/find/schema.json | 2 +- packages/ipa/lib/user/index.js | 4 +- packages/ipa/lib/user/schema.json | 2 +- packages/ipa/lib/user/show/index.js | 4 +- packages/ipa/lib/user/show/schema.json | 2 +- packages/ipa/lib/user/status/index.js | 15 +- packages/ipa/lib/user/status/schema.json | 2 +- packages/ipa/package.json | 35 +- packages/ipa/test.sample.coffee | 4 +- packages/ipa/test/group/add_member.coffee | 21 +- packages/ipa/test/group/del.coffee | 16 +- packages/ipa/test/group/exists.coffee | 18 +- packages/ipa/test/group/index.coffee | 34 +- packages/ipa/test/group/show.coffee | 14 +- packages/ipa/test/service/del.coffee | 18 +- packages/ipa/test/service/exists.coffee | 20 +- packages/ipa/test/service/index.coffee | 25 +- packages/ipa/test/service/show.coffee | 14 +- packages/ipa/test/test.coffee | 26 +- packages/ipa/test/user/del.coffee | 20 +- packages/ipa/test/user/disable.coffee | 28 +- packages/ipa/test/user/enable.coffee | 28 +- packages/ipa/test/user/exists.coffee | 
20 +- packages/ipa/test/user/find.coffee | 30 +- packages/ipa/test/user/index.coffee | 40 +- packages/ipa/test/user/show.coffee | 16 +- packages/ipa/test/user/status.coffee | 16 +- packages/java/README.md | 21 + packages/java/env/openjdk9/docker-compose.yml | 26 + packages/java/env/openjdk9/entrypoint.sh | 17 + packages/java/env/openjdk9/nodejs/Dockerfile | 27 + .../env/ubuntu => java/env/openjdk9}/run.sh | 0 packages/java/env/openjdk9/target/Dockerfile | 26 + packages/java/env/openjdk9/test.coffee | 17 + packages/java/env/run.sh | 7 + .../{keystore_add => keystore/add}/README.md | 6 +- packages/java/lib/keystore/add/index.js | 217 +++++ .../add}/schema.json | 4 +- packages/java/lib/keystore/exists/README.md | 4 + packages/java/lib/keystore/exists/index.js | 44 + packages/java/lib/keystore/exists/schema.json | 29 + .../remove}/README.md | 6 +- packages/java/lib/keystore/remove/index.js | 45 + .../remove}/schema.json | 3 +- packages/java/lib/keystore_add/index.js | 198 ----- packages/java/lib/keystore_remove/index.js | 39 - packages/java/lib/register.js | 25 +- packages/java/package.json | 28 +- packages/java/test.sample.coffee | 4 +- .../add.coffee} | 180 ++-- packages/java/test/keystore/exists.coffee | 39 + packages/java/test/keystore/remove.coffee | 120 +++ packages/java/test/keystore_remove.coffee | 125 --- .../{keystore => resources}/certs1/cacert.pem | 0 .../{keystore => resources}/certs1/cacert.seq | 0 .../certs1/cacert_key.pem | 0 .../{keystore => resources}/certs1/generate | 0 .../certs1/node_1_cert.pem | 0 .../certs1/node_1_key.pem | 0 .../{keystore => resources}/certs2/cacert.pem | 0 .../{keystore => resources}/certs2/cacert.seq | 0 .../certs2/cacert_key.pem | 0 .../{keystore => resources}/certs2/generate | 0 .../certs2/node_1_cert.pem | 0 .../certs2/node_1_key.pem | 0 packages/java/test/test.coffee | 26 +- packages/krb5/README.md | 18 + packages/krb5/env/krb5/nodejs/Dockerfile | 82 +- packages/krb5/env/krb5/nodejs/entrypoint.sh | 20 +- packages/krb5/env/krb5/server/Dockerfile | 5 +- packages/krb5/env/krb5/test.coffee | 4 +- packages/krb5/lib/addprinc/index.js | 6 +- packages/krb5/lib/addprinc/schema.json | 2 +- packages/krb5/lib/delprinc/index.js | 6 +- packages/krb5/lib/delprinc/schema.json | 2 +- packages/krb5/lib/execute/index.js | 6 +- packages/krb5/lib/ktadd/index.js | 8 +- packages/krb5/lib/ktadd/schema.json | 8 +- packages/krb5/lib/ktutil/add/index.js | 135 +-- packages/krb5/lib/ktutil/add/schema.json | 8 +- packages/krb5/lib/register.js | 25 +- packages/krb5/lib/ticket/index.js | 8 +- packages/krb5/lib/ticket/schema.json | 4 +- packages/krb5/lib/utils/index.js | 7 +- packages/krb5/lib/utils/krb5.js | 48 +- packages/krb5/package.json | 27 +- packages/krb5/test.sample.coffee | 4 +- packages/krb5/test/addprinc.coffee | 80 +- packages/krb5/test/delprinc.coffee | 22 +- packages/krb5/test/execute.coffee | 24 +- packages/krb5/test/ktadd.coffee | 38 +- packages/krb5/test/ktutil/add.coffee | 40 +- packages/krb5/test/test.coffee | 26 +- packages/krb5/test/ticket.coffee | 32 +- packages/krb5/test/utils/krb5.coffee | 7 +- packages/ldap/README.md | 30 + packages/ldap/env/openldap/Dockerfile | 78 +- packages/ldap/env/openldap/entrypoint.sh | 14 +- packages/ldap/env/openldap/run.sh | 2 +- packages/ldap/env/openldap/test.coffee | 2 +- packages/ldap/lib/acl/index.js | 46 +- packages/ldap/lib/add/index.js | 20 +- packages/ldap/lib/delete/index.js | 12 +- packages/ldap/lib/index/index.js | 21 +- packages/ldap/lib/modify/index.js | 11 +- packages/ldap/lib/modify/schema.json | 2 +- 
packages/ldap/lib/register.js | 41 +- packages/ldap/lib/schema/index.js | 39 +- packages/ldap/lib/schema/schema.json | 2 +- packages/ldap/lib/search/index.js | 7 +- packages/ldap/lib/tools/database/index.js | 4 +- packages/ldap/lib/tools/database/schema.json | 2 +- packages/ldap/lib/tools/databases/index.js | 6 +- packages/ldap/lib/tools/databases/schema.json | 2 +- packages/ldap/lib/user/index.js | 35 +- packages/ldap/lib/utils/index.js | 6 +- packages/ldap/lib/utils/ldap.js | 2 +- packages/ldap/package.json | 27 +- packages/ldap/test.sample.coffee | 4 +- packages/ldap/test/acl.coffee | 56 +- packages/ldap/test/add.coffee | 36 +- packages/ldap/test/index.coffee | 62 +- packages/ldap/test/modify.coffee | 26 +- packages/ldap/test/schema.coffee | 22 +- packages/ldap/test/search.coffee | 18 +- packages/ldap/test/test.coffee | 26 +- packages/ldap/test/tools/database.coffee | 24 +- packages/ldap/test/tools/databases.coffee | 18 +- packages/ldap/test/user.coffee | 34 +- packages/ldap/test/utils/ldap.coffee | 7 +- packages/log/README.md | 17 + packages/log/lib/cli/index.js | 12 +- packages/log/lib/cli/schema.json | 6 +- packages/log/lib/csv/index.js | 6 +- packages/log/lib/csv/schema.json | 2 +- packages/log/lib/fs/index.js | 25 +- packages/log/lib/md/index.js | 8 +- packages/log/lib/md/schema.json | 2 +- packages/log/lib/register.js | 26 +- packages/log/lib/stream/index.js | 4 +- packages/log/package.json | 23 +- packages/log/test.sample.coffee | 4 +- packages/log/test/cli.coffee | 13 +- packages/log/test/csv.coffee | 11 +- packages/log/test/fs.coffee | 15 +- packages/log/test/md.coffee | 14 +- packages/log/test/test.coffee | 26 +- packages/lxd/README.md | 67 +- packages/lxd/assets/multipass.sh | 24 +- packages/lxd/lib/cluster/cli/index.js | 46 +- packages/lxd/lib/cluster/cli/start/index.js | 12 +- packages/lxd/lib/cluster/cli/stop/index.js | 8 +- packages/lxd/lib/cluster/delete/index.js | 4 +- packages/lxd/lib/cluster/delete/schema.json | 4 +- packages/lxd/lib/cluster/index.js | 8 +- .../lxd/lib/cluster/samples/three_nodes.js | 10 +- packages/lxd/lib/cluster/schema.json | 12 +- packages/lxd/lib/cluster/stop/index.js | 4 +- packages/lxd/lib/cluster/stop/schema.json | 2 +- .../lxd/lib/config/device/delete/index.js | 4 +- .../lxd/lib/config/device/delete/schema.json | 2 +- .../lxd/lib/config/device/exists/index.js | 4 +- .../lxd/lib/config/device/exists/schema.json | 2 +- packages/lxd/lib/config/device/index.js | 15 +- packages/lxd/lib/config/device/schema.json | 2 +- packages/lxd/lib/config/device/show/index.js | 4 +- .../lxd/lib/config/device/show/schema.json | 2 +- packages/lxd/lib/config/set/index.js | 35 +- packages/lxd/lib/config/set/schema.json | 2 +- packages/lxd/lib/delete/index.js | 6 +- packages/lxd/lib/delete/schema.json | 2 +- packages/lxd/lib/exec/index.js | 9 +- packages/lxd/lib/exec/schema.json | 8 +- packages/lxd/lib/exists/index.js | 4 +- packages/lxd/lib/exists/schema.json | 2 +- packages/lxd/lib/file/exists/index.js | 4 +- packages/lxd/lib/file/exists/schema.json | 2 +- packages/lxd/lib/file/pull/index.js | 10 +- packages/lxd/lib/file/pull/schema.json | 4 +- packages/lxd/lib/file/push/index.js | 10 +- packages/lxd/lib/file/push/schema.json | 4 +- packages/lxd/lib/file/read/index.js | 4 +- packages/lxd/lib/file/read/schema.json | 2 +- packages/lxd/lib/goodies/prlimit/index.js | 8 +- packages/lxd/lib/goodies/prlimit/schema.json | 2 +- packages/lxd/lib/info/index.js | 4 +- packages/lxd/lib/info/schema.json | 2 +- packages/lxd/lib/init/index.js | 6 +- packages/lxd/lib/list/index.js 
| 4 +- packages/lxd/lib/network/attach/index.js | 8 +- packages/lxd/lib/network/attach/schema.json | 2 +- packages/lxd/lib/network/delete/index.js | 4 +- packages/lxd/lib/network/detach/index.js | 6 +- packages/lxd/lib/network/detach/schema.json | 2 +- packages/lxd/lib/network/index.js | 22 +- packages/lxd/lib/network/list/index.js | 4 +- packages/lxd/lib/query/index.js | 6 +- packages/lxd/lib/query/schema.json | 2 +- packages/lxd/lib/register.js | 101 ++- packages/lxd/lib/resources/index.js | 4 +- packages/lxd/lib/running/index.js | 4 +- packages/lxd/lib/running/schema.json | 2 +- packages/lxd/lib/start/index.js | 4 +- packages/lxd/lib/start/schema.json | 2 +- packages/lxd/lib/state/index.js | 4 +- packages/lxd/lib/state/schema.json | 2 +- packages/lxd/lib/stop/index.js | 4 +- packages/lxd/lib/stop/schema.json | 2 +- packages/lxd/lib/storage/delete/index.js | 6 +- packages/lxd/lib/storage/exists/index.js | 4 +- packages/lxd/lib/storage/index.js | 123 ++- packages/lxd/lib/storage/list/index.js | 4 +- .../lxd/lib/storage/volume/attach/index.js | 4 +- .../lxd/lib/storage/volume/attach/schema.json | 2 +- .../lxd/lib/storage/volume/delete/index.js | 4 +- packages/lxd/lib/storage/volume/get/index.js | 4 +- packages/lxd/lib/storage/volume/index.js | 4 +- packages/lxd/lib/storage/volume/list/index.js | 4 +- packages/lxd/lib/utils/index.js | 6 +- .../lxd/lib/utils/stderr_to_error_message.js | 4 +- packages/lxd/lib/wait/ready/index.js | 115 ++- packages/lxd/lib/wait/ready/schema.json | 2 +- packages/lxd/package.json | 27 +- packages/lxd/test.sample.coffee | 4 +- packages/lxd/test/cluster/delete.coffee | 23 +- packages/lxd/test/cluster/index.coffee | 19 +- .../lxd/test/cluster/index.lifecycle.coffee | 13 +- packages/lxd/test/cluster/stop.coffee | 15 +- packages/lxd/test/config/device/delete.coffee | 14 +- packages/lxd/test/config/device/exists.coffee | 14 +- packages/lxd/test/config/device/index.coffee | 29 +- packages/lxd/test/config/device/show.coffee | 12 +- packages/lxd/test/config/set.coffee | 14 +- packages/lxd/test/delete.coffee | 14 +- packages/lxd/test/exec.coffee | 28 +- packages/lxd/test/exists.coffee | 12 +- packages/lxd/test/file/exists.coffee | 16 +- packages/lxd/test/file/pull.coffee | 169 ++-- packages/lxd/test/file/push.coffee | 27 +- packages/lxd/test/file/read.coffee | 17 +- packages/lxd/test/goodies/prlimit.coffee | 12 +- packages/lxd/test/info.coffee | 12 +- packages/lxd/test/init.coffee | 38 +- packages/lxd/test/list.coffee | 34 +- packages/lxd/test/network/attach.coffee | 14 +- packages/lxd/test/network/delete.coffee | 10 +- packages/lxd/test/network/detach.coffee | 14 +- packages/lxd/test/network/index.coffee | 10 +- packages/lxd/test/network/list.coffee | 10 +- packages/lxd/test/query.coffee | 12 +- packages/lxd/test/resources.coffee | 10 +- packages/lxd/test/running.coffee | 14 +- packages/lxd/test/start.coffee | 14 +- packages/lxd/test/state.coffee | 12 +- packages/lxd/test/stop.coffee | 14 +- packages/lxd/test/storage/delete.coffee | 10 +- packages/lxd/test/storage/exists.coffee | 10 +- packages/lxd/test/storage/index.coffee | 10 +- packages/lxd/test/storage/list.coffee | 10 +- .../lxd/test/storage/volume/attach.coffee | 26 +- .../lxd/test/storage/volume/delete.coffee | 10 +- packages/lxd/test/storage/volume/get.coffee | 10 +- packages/lxd/test/storage/volume/index.coffee | 10 +- packages/lxd/test/storage/volume/list.coffee | 10 +- packages/lxd/test/test.coffee | 57 +- packages/lxd/test/wait/ready.coffee | 18 +- packages/network/README.md | 18 + 
packages/network/lib/http/index.js | 132 +-- packages/network/lib/http/schema.json | 8 +- packages/network/lib/http/wait/index.js | 6 +- packages/network/lib/http/wait/schema.json | 6 +- packages/network/lib/register.js | 21 +- packages/network/lib/tcp/assert/index.js | 84 +- packages/network/lib/tcp/assert/schema.json | 8 +- packages/network/lib/tcp/wait/index.js | 8 +- packages/network/lib/utils/curl.js | 334 ++++---- packages/network/lib/utils/index.js | 7 +- packages/network/package.json | 25 +- packages/network/test.sample.coffee | 4 +- packages/network/test/http/index.coffee | 132 +-- packages/network/test/http/wait.coffee | 76 +- packages/network/test/tcp/assert.coffee | 38 +- packages/network/test/tcp/wait.coffee | 12 +- packages/network/test/test.coffee | 26 +- packages/nikita/README.md | 45 + packages/nikita/lib/index.js | 31 +- packages/nikita/package.json | 8 +- packages/nikita/test/index.js | 19 +- packages/service/README.md | 15 + packages/service/env/archlinux/Dockerfile | 24 +- .../service/env/archlinux/docker-compose.yml | 8 +- packages/service/env/archlinux/test.coffee | 2 +- packages/service/env/centos7/Dockerfile | 32 +- packages/service/env/centos7/test.coffee | 2 +- packages/service/env/run.sh | 5 +- packages/service/env/systemctl/index.coffee | 47 -- packages/service/env/systemctl/index.js | 61 ++ packages/service/env/systemctl/test.coffee | 8 +- .../ubuntu-1404-outdated/docker-compose.yml | 21 + .../env/ubuntu-1404-outdated/entrypoint.sh | 19 + .../ubuntu-1404-outdated/nodejs/Dockerfile | 27 + .../service/env/ubuntu-1404-outdated/run.sh | 4 + .../ubuntu-1404-outdated/target/Dockerfile | 21 + .../env/ubuntu-1404-outdated/test.coffee | 17 + .../env/ubuntu-1404/docker-compose.yml | 21 + .../service/env/ubuntu-1404/entrypoint.sh | 19 + .../service/env/ubuntu-1404/nodejs/Dockerfile | 27 + packages/service/env/ubuntu-1404/run.sh | 4 + .../service/env/ubuntu-1404/target/Dockerfile | 21 + .../env/{ubuntu => ubuntu-1404}/test.coffee | 10 +- packages/service/env/ubuntu/Dockerfile | 37 - .../service/env/ubuntu/docker-compose.yml | 11 - packages/service/lib/assert/index.js | 40 +- packages/service/lib/assert/schema.json | 2 +- packages/service/lib/discover/index.js | 6 +- packages/service/lib/index.js | 14 +- packages/service/lib/init/index.js | 45 +- packages/service/lib/init/schema.json | 19 +- packages/service/lib/install/index.js | 164 +--- packages/service/lib/install/schema.json | 18 +- packages/service/lib/installed/index.js | 65 ++ packages/service/lib/installed/schema.json | 13 + packages/service/lib/outdated/index.js | 82 ++ packages/service/lib/outdated/schema.json | 13 + packages/service/lib/register.js | 38 +- packages/service/lib/remove/index.js | 128 ++- packages/service/lib/remove/schema.json | 4 +- packages/service/lib/restart/index.js | 4 +- packages/service/lib/restart/schema.json | 2 +- packages/service/lib/schema.json | 14 +- packages/service/lib/start/index.js | 6 +- packages/service/lib/start/schema.json | 2 +- packages/service/lib/startup/index.js | 6 +- packages/service/lib/startup/schema.json | 2 +- packages/service/lib/status/README.md | 6 +- packages/service/lib/status/index.js | 55 +- packages/service/lib/status/schema.json | 2 +- packages/service/lib/stop/index.js | 6 +- packages/service/lib/stop/schema.json | 2 +- packages/service/package.json | 26 +- packages/service/test.sample.coffee | 4 +- packages/service/test/assert.coffee | 134 +-- packages/service/test/crond-systemd.hbs | 12 - packages/service/test/crond.hbs | 131 --- 
packages/service/test/discover.coffee | 10 +- .../service/test/index.config.startup.coffee | 54 +- .../service/test/index.config.state.coffee | 83 +- packages/service/test/init.coffee | 177 ++-- packages/service/test/install.arch.coffee | 39 + packages/service/test/install.coffee | 89 +- packages/service/test/installed.coffee | 58 ++ packages/service/test/outdated.coffee | 50 ++ packages/service/test/remove.coffee | 29 +- packages/service/test/restart.coffee | 22 +- packages/service/test/start.coffee | 30 +- packages/service/test/startup.coffee | 44 +- packages/service/test/status.coffee | 48 +- packages/service/test/stop.coffee | 20 +- packages/service/test/test.coffee | 41 +- packages/system/README.md | 14 + packages/system/env/cgroups-multipass/run.sh | 48 +- .../system/env/cgroups-multipass/test.coffee | 2 +- packages/system/env/cgroups/index.coffee | 60 -- packages/system/env/cgroups/index.js | 82 ++ packages/system/env/cgroups/test.coffee | 2 +- packages/system/env/info_archlinux/Dockerfile | 25 +- .../env/info_archlinux/docker-compose.yml | 13 +- .../system/env/info_archlinux/test.coffee | 2 +- .../env/info_centos6/docker-compose.yml | 30 +- .../system/env/info_centos6/nodejs/Dockerfile | 28 +- .../system/env/info_centos6/target/Dockerfile | 40 +- packages/system/env/info_centos6/test.coffee | 3 +- packages/system/env/info_centos7/Dockerfile | 15 +- .../env/info_centos7/docker-compose.yml | 30 +- .../system/env/info_centos7/entrypoint.sh | 3 - .../system/env/info_centos7/nodejs/Dockerfile | 27 + .../system/env/info_centos7/target/Dockerfile | 30 + packages/system/env/info_centos7/test.coffee | 9 +- packages/system/env/info_ubuntu/Dockerfile | 31 +- .../system/env/info_ubuntu/docker-compose.yml | 13 +- packages/system/env/info_ubuntu/test.coffee | 4 +- packages/system/env/limits/Dockerfile | 33 +- packages/system/env/limits/test.coffee | 2 +- packages/system/env/run.sh | 4 +- packages/system/env/tmpfs/Dockerfile | 29 +- packages/system/env/tmpfs/docker-compose.yml | 13 +- packages/system/env/tmpfs/test.coffee | 2 +- packages/system/env/user/Dockerfile | 27 +- packages/system/env/user/docker-compose.yml | 13 +- packages/system/env/user/test.coffee | 2 +- packages/system/lib/cgroups/index.js | 10 +- packages/system/lib/group/index.js | 7 +- packages/system/lib/group/read/index.js | 6 +- packages/system/lib/group/read/schema.json | 2 +- packages/system/lib/group/remove/index.js | 4 +- packages/system/lib/info/disks/index.js | 6 +- packages/system/lib/info/os/index.js | 6 +- packages/system/lib/limits/index.js | 6 +- packages/system/lib/mod/index.js | 10 +- packages/system/lib/register.js | 50 +- packages/system/lib/running/index.js | 6 +- packages/system/lib/tmpfs/index.js | 33 +- packages/system/lib/tmpfs/schema.json | 6 +- packages/system/lib/uid_gid/index.js | 4 +- packages/system/lib/uid_gid/schema.json | 4 +- packages/system/lib/user/index.js | 23 +- packages/system/lib/user/read/index.js | 6 +- packages/system/lib/user/read/schema.json | 2 +- packages/system/lib/user/remove/index.js | 4 +- packages/system/lib/utils/cgconfig.js | 69 +- packages/system/lib/utils/index.js | 13 +- packages/system/lib/utils/tmpfs.js | 86 +- packages/system/package.json | 27 +- packages/system/test.sample.coffee | 4 +- packages/system/test/cgroups.coffee | 24 +- packages/system/test/group/index.coffee | 22 +- packages/system/test/group/read.coffee | 25 +- packages/system/test/group/remove.coffee | 16 +- packages/system/test/info/disks.coffee | 10 +- packages/system/test/info/os.coffee | 16 +- 
packages/system/test/limits.coffee | 32 +- packages/system/test/mod.coffee | 16 +- packages/system/test/running.coffee | 12 +- packages/system/test/test.coffee | 26 +- packages/system/test/tmpfs.coffee | 79 +- packages/system/test/uid_gid.coffee | 26 +- packages/system/test/user/index.coffee | 25 +- packages/system/test/user/read.coffee | 36 +- packages/system/test/user/remove.coffee | 19 +- packages/system/test/utils/tmpfs.coffee | 54 ++ packages/tools/README.md | 14 + packages/tools/env/cron/Dockerfile | 18 +- packages/tools/env/cron/test.coffee | 2 +- packages/tools/env/dconf/Dockerfile | 23 +- packages/tools/env/dconf/test.coffee | 2 +- packages/tools/env/iptables/index.coffee | 48 -- packages/tools/env/iptables/index.js | 58 ++ packages/tools/env/iptables/test.coffee | 4 +- packages/tools/env/npm/index.coffee | 48 -- packages/tools/env/npm/index.js | 58 ++ packages/tools/env/npm/test.coffee | 4 +- packages/tools/env/repo-alma8/Dockerfile | 30 +- .../tools/env/repo-alma8/docker-compose.yml | 6 +- packages/tools/env/repo-alma8/test.coffee | 5 +- packages/tools/env/repo-rocky9/Dockerfile | 38 + .../tools/env/repo-rocky9/docker-compose.yml | 11 + .../env/repo-rocky9}/entrypoint.sh | 0 packages/tools/env/repo-rocky9/run.sh | 4 + packages/tools/env/repo-rocky9/test.coffee | 17 + packages/tools/env/rubygems/index.coffee | 56 -- packages/tools/env/rubygems/index.js | 65 ++ packages/tools/env/rubygems/test.coffee | 4 +- packages/tools/env/run.sh | 8 +- packages/tools/lib/backup/index.js | 42 +- packages/tools/lib/backup/schema.json | 2 +- packages/tools/lib/compress/index.js | 4 +- packages/tools/lib/cron/add/index.js | 8 +- packages/tools/lib/cron/remove/index.js | 8 +- packages/tools/lib/dconf/index.js | 6 +- packages/tools/lib/extract/index.js | 6 +- packages/tools/lib/git/index.js | 6 +- packages/tools/lib/gsettings/index.js | 6 +- packages/tools/lib/iptables/README.md | 9 +- packages/tools/lib/iptables/index.js | 104 +-- packages/tools/lib/iptables/schema.json | 2 +- packages/tools/lib/npm/index.js | 4 +- packages/tools/lib/npm/list/index.js | 4 +- packages/tools/lib/npm/list/schema.json | 2 +- packages/tools/lib/npm/outdated/index.js | 4 +- packages/tools/lib/npm/outdated/schema.json | 2 +- packages/tools/lib/npm/schema.json | 4 +- packages/tools/lib/npm/uninstall/index.js | 75 +- packages/tools/lib/npm/uninstall/schema.json | 2 +- packages/tools/lib/npm/upgrade/index.js | 8 +- packages/tools/lib/npm/upgrade/schema.json | 2 +- packages/tools/lib/register.js | 55 +- packages/tools/lib/repo/index.js | 130 ++- packages/tools/lib/repo/schema.json | 17 +- packages/tools/lib/rubygems/fetch/index.js | 6 +- packages/tools/lib/rubygems/install/index.js | 8 +- .../tools/lib/rubygems/install/schema.json | 2 +- packages/tools/lib/rubygems/remove/index.js | 6 +- packages/tools/lib/ssh/keygen/index.js | 6 +- packages/tools/lib/sysctl/index.js | 6 +- packages/tools/lib/utils/index.js | 8 +- packages/tools/lib/utils/iptables.js | 784 ++++++++++-------- packages/tools/package.json | 28 +- packages/tools/test.sample.coffee | 4 +- packages/tools/test/backup.coffee | 22 +- packages/tools/test/compress.coffee | 16 +- packages/tools/test/cron/add.coffee | 18 +- packages/tools/test/cron/remove.coffee | 10 +- packages/tools/test/dconf.coffee | 10 +- packages/tools/test/extract.coffee | 15 +- packages/tools/test/git.coffee | 11 +- packages/tools/test/iptables.coffee | 13 +- packages/tools/test/npm/index.coffee | 10 +- packages/tools/test/npm/list.coffee | 10 +- packages/tools/test/npm/outdated.coffee | 10 +- 
packages/tools/test/npm/uninstall.coffee | 10 +- packages/tools/test/npm/upgrade.coffee | 10 +- packages/tools/test/repo.coffee | 172 ++-- packages/tools/test/resources/a_file | 1 - .../tools/test/resources/module_async.coffee | 5 - .../test/resources/module_async_object.coffee | 5 - .../tools/test/resources/module_sync.coffee | 3 - packages/tools/test/resources/render.eco | 1 - packages/tools/test/rubygems/fetch.coffee | 14 +- packages/tools/test/rubygems/install.coffee | 20 +- packages/tools/test/rubygems/remove.coffee | 16 +- packages/tools/test/ssh/keygen.coffee | 10 +- packages/tools/test/sysctl.coffee | 10 +- packages/tools/test/test.coffee | 26 +- packages/tools/test/utils/iptables.coffee | 7 +- 1091 files changed, 13648 insertions(+), 11919 deletions(-) delete mode 100644 extra/lxd-runner/lib/actions/info.js delete mode 100644 extra/lxd-runner/src/actions/delete.coffee delete mode 100644 extra/lxd-runner/src/actions/enter.coffee delete mode 100644 extra/lxd-runner/src/actions/exec.coffee delete mode 100644 extra/lxd-runner/src/actions/run.coffee delete mode 100644 extra/lxd-runner/src/actions/start.coffee delete mode 100644 extra/lxd-runner/src/actions/state.coffee delete mode 100644 extra/lxd-runner/src/actions/stop.coffee delete mode 100644 extra/lxd-runner/src/actions/test.coffee delete mode 100644 extra/lxd-runner/src/index.coffee delete mode 100644 packages/core/env/chown/index.coffee create mode 100644 packages/core/env/chown/index.js delete mode 100644 packages/core/env/ssh/index.coffee create mode 100644 packages/core/env/ssh/index.js rename packages/core/lib/plugins/{assertions/index.js => assertions.js} (92%) rename packages/core/lib/plugins/{conditions/index.js => conditions.js} (88%) create mode 100644 packages/core/lib/plugins/metadata/audit.js create mode 100644 packages/core/test/loaders/all.js create mode 100644 packages/core/test/loaders/coffee.js create mode 100644 packages/db/lib/utils/db.js create mode 100644 packages/db/test/utils/command.coffee create mode 100644 packages/db/test/utils/escape.coffee delete mode 100644 packages/db/test/utils/index.coffee create mode 100644 packages/db/test/utils/jdbc.coffee create mode 100644 packages/docker/lib/images/README.md create mode 100644 packages/docker/lib/images/index.js create mode 100644 packages/docker/lib/images/schema.json create mode 100644 packages/docker/lib/utils/docker.js create mode 100644 packages/docker/test/images.coffee delete mode 100644 packages/file/test/download.zip delete mode 100644 packages/file/test/types/test.coffee delete mode 100644 packages/ipa/env/ipa/index.coffee create mode 100644 packages/ipa/env/ipa/index.js create mode 100644 packages/java/env/openjdk9/docker-compose.yml create mode 100755 packages/java/env/openjdk9/entrypoint.sh create mode 100644 packages/java/env/openjdk9/nodejs/Dockerfile rename packages/{service/env/ubuntu => java/env/openjdk9}/run.sh (100%) create mode 100644 packages/java/env/openjdk9/target/Dockerfile create mode 100644 packages/java/env/openjdk9/test.coffee create mode 100755 packages/java/env/run.sh rename packages/java/lib/{keystore_add => keystore/add}/README.md (93%) create mode 100644 packages/java/lib/keystore/add/index.js rename packages/java/lib/{keystore_add => keystore/add}/schema.json (89%) create mode 100644 packages/java/lib/keystore/exists/README.md create mode 100644 packages/java/lib/keystore/exists/index.js create mode 100644 packages/java/lib/keystore/exists/schema.json rename packages/java/lib/{keystore_remove => keystore/remove}/README.md 
(80%) create mode 100644 packages/java/lib/keystore/remove/index.js rename packages/java/lib/{keystore_remove => keystore/remove}/schema.json (94%) delete mode 100644 packages/java/lib/keystore_add/index.js delete mode 100644 packages/java/lib/keystore_remove/index.js rename packages/java/test/{keystore_add.coffee => keystore/add.coffee} (61%) create mode 100644 packages/java/test/keystore/exists.coffee create mode 100644 packages/java/test/keystore/remove.coffee delete mode 100644 packages/java/test/keystore_remove.coffee rename packages/java/test/{keystore => resources}/certs1/cacert.pem (100%) rename packages/java/test/{keystore => resources}/certs1/cacert.seq (100%) rename packages/java/test/{keystore => resources}/certs1/cacert_key.pem (100%) rename packages/java/test/{keystore => resources}/certs1/generate (100%) rename packages/java/test/{keystore => resources}/certs1/node_1_cert.pem (100%) rename packages/java/test/{keystore => resources}/certs1/node_1_key.pem (100%) rename packages/java/test/{keystore => resources}/certs2/cacert.pem (100%) rename packages/java/test/{keystore => resources}/certs2/cacert.seq (100%) rename packages/java/test/{keystore => resources}/certs2/cacert_key.pem (100%) rename packages/java/test/{keystore => resources}/certs2/generate (100%) rename packages/java/test/{keystore => resources}/certs2/node_1_cert.pem (100%) rename packages/java/test/{keystore => resources}/certs2/node_1_key.pem (100%) delete mode 100644 packages/service/env/systemctl/index.coffee create mode 100644 packages/service/env/systemctl/index.js create mode 100644 packages/service/env/ubuntu-1404-outdated/docker-compose.yml create mode 100755 packages/service/env/ubuntu-1404-outdated/entrypoint.sh create mode 100644 packages/service/env/ubuntu-1404-outdated/nodejs/Dockerfile create mode 100755 packages/service/env/ubuntu-1404-outdated/run.sh create mode 100644 packages/service/env/ubuntu-1404-outdated/target/Dockerfile create mode 100644 packages/service/env/ubuntu-1404-outdated/test.coffee create mode 100644 packages/service/env/ubuntu-1404/docker-compose.yml create mode 100755 packages/service/env/ubuntu-1404/entrypoint.sh create mode 100644 packages/service/env/ubuntu-1404/nodejs/Dockerfile create mode 100755 packages/service/env/ubuntu-1404/run.sh create mode 100644 packages/service/env/ubuntu-1404/target/Dockerfile rename packages/service/env/{ubuntu => ubuntu-1404}/test.coffee (60%) delete mode 100644 packages/service/env/ubuntu/Dockerfile delete mode 100644 packages/service/env/ubuntu/docker-compose.yml create mode 100644 packages/service/lib/installed/index.js create mode 100644 packages/service/lib/installed/schema.json create mode 100644 packages/service/lib/outdated/index.js create mode 100644 packages/service/lib/outdated/schema.json delete mode 100644 packages/service/test/crond-systemd.hbs delete mode 100644 packages/service/test/crond.hbs create mode 100644 packages/service/test/install.arch.coffee create mode 100644 packages/service/test/installed.coffee create mode 100644 packages/service/test/outdated.coffee delete mode 100644 packages/system/env/cgroups/index.coffee create mode 100644 packages/system/env/cgroups/index.js create mode 100644 packages/system/env/info_centos7/nodejs/Dockerfile create mode 100644 packages/system/env/info_centos7/target/Dockerfile create mode 100644 packages/system/test/utils/tmpfs.coffee delete mode 100644 packages/tools/env/iptables/index.coffee create mode 100644 packages/tools/env/iptables/index.js delete mode 100644 
packages/tools/env/npm/index.coffee create mode 100644 packages/tools/env/npm/index.js create mode 100644 packages/tools/env/repo-rocky9/Dockerfile create mode 100644 packages/tools/env/repo-rocky9/docker-compose.yml rename packages/{service/env/ubuntu => tools/env/repo-rocky9}/entrypoint.sh (100%) create mode 100755 packages/tools/env/repo-rocky9/run.sh create mode 100644 packages/tools/env/repo-rocky9/test.coffee delete mode 100644 packages/tools/env/rubygems/index.coffee create mode 100644 packages/tools/env/rubygems/index.js delete mode 100644 packages/tools/test/resources/a_file delete mode 100644 packages/tools/test/resources/module_async.coffee delete mode 100644 packages/tools/test/resources/module_async_object.coffee delete mode 100644 packages/tools/test/resources/module_sync.coffee delete mode 100644 packages/tools/test/resources/render.eco diff --git a/extra/lxd-runner/lib/actions/delete.js b/extra/lxd-runner/lib/actions/delete.js index e5f2c769e..9bbbb5a70 100644 --- a/extra/lxd-runner/lib/actions/delete.js +++ b/extra/lxd-runner/lib/actions/delete.js @@ -1,6 +1,5 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = function({config}) { - return this.lxc.delete({ +export default async function({config}) { + await this.lxc.delete({ $header: 'Container delete', container: `${config.container}`, force: config.force diff --git a/extra/lxd-runner/lib/actions/enter.js b/extra/lxd-runner/lib/actions/enter.js index 56d9eed0d..a4a6c85bb 100644 --- a/extra/lxd-runner/lib/actions/enter.js +++ b/extra/lxd-runner/lib/actions/enter.js @@ -1,11 +1,10 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = function({config}) { +export default async function({config}) { // Note, using `lxc shell` would be nice but `cwd` doesn't seem right // `lxc shell --cwd /nikita/packages/$pkg` then `pwd` return `/root` // `lxc shell --cwd /nikita/packages/$pkg -- pkg` prints: // `pwd: ignoring non-option arguments` // `/nikita/packages/$pkg` - return this.execute({ + await this.execute({ $header: 'Container enter', command: `lxc exec --cwd ${config.cwd} ${config.container} -- bash`, stdio: ['inherit', 'inherit', 'inherit'], diff --git a/extra/lxd-runner/lib/actions/exec.js b/extra/lxd-runner/lib/actions/exec.js index 4a9613d17..dea4c73aa 100644 --- a/extra/lxd-runner/lib/actions/exec.js +++ b/extra/lxd-runner/lib/actions/exec.js @@ -1,11 +1,10 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = function({config}) { +export default async function({config}) { // Note, using `lxc shell` would be nice but `cwd` doesn't seem right // `lxc shell --cwd /nikita/packages/$pkg` then `pwd` return `/root` // `lxc shell --cwd /nikita/packages/$pkg -- pkg` prints: // `pwd: ignoring non-option arguments` // `/nikita/packages/$pkg` - return this.execute({ + await this.execute({ $header: 'Container exec', command: `lxc exec --cwd ${config.cwd} ${config.container} -- ${config.cmd}`, stdio: ['inherit', 'inherit', 'inherit'], diff --git a/extra/lxd-runner/lib/actions/info.js b/extra/lxd-runner/lib/actions/info.js deleted file mode 100644 index a5c4594d2..000000000 --- a/extra/lxd-runner/lib/actions/info.js +++ /dev/null @@ -1,9 +0,0 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = async function({config}) { - var info; - info = (await this.lxc.info({ - $header: 'Container delete', - container: `${config.container}` - })); - return console.log(info); -}; diff --git a/extra/lxd-runner/lib/actions/run.js b/extra/lxd-runner/lib/actions/run.js index 03b6de4fa..350eb8076 100644 --- 
a/extra/lxd-runner/lib/actions/run.js +++ b/extra/lxd-runner/lib/actions/run.js @@ -1,6 +1,5 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = async function({config}) { - await this.call('@nikitajs/lxd-runner/lib/actions/start', config); - await this.call('@nikitajs/lxd-runner/lib/actions/test', config); - return (await this.call('@nikitajs/lxd-runner/lib/actions/stop', config)); +export default async function({config}) { + await this.call('@nikitajs/lxd-runner/start', config); + await this.call('@nikitajs/lxd-runner/test', config); + await this.call('@nikitajs/lxd-runner/stop', config); }; diff --git a/extra/lxd-runner/lib/actions/start.js b/extra/lxd-runner/lib/actions/start.js index 9142cfb96..2ce76ebd3 100644 --- a/extra/lxd-runner/lib/actions/start.js +++ b/extra/lxd-runner/lib/actions/start.js @@ -1,6 +1,5 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = function({config}) { - return this.lxc.cluster({ +export default async function({config}) { + await this.lxc.cluster({ $header: 'Container start' }, config.cluster); }; diff --git a/extra/lxd-runner/lib/actions/state.js b/extra/lxd-runner/lib/actions/state.js index 6d57f2b1d..602af3046 100644 --- a/extra/lxd-runner/lib/actions/state.js +++ b/extra/lxd-runner/lib/actions/state.js @@ -1,8 +1,7 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = async function({config, ...args}) { - ({config} = (await this.lxc.state({ +export default async function({config}) { + const {config: state} = await this.lxc.state({ $header: 'Container state', container: `${config.container}` - }))); - return process.stdout.write(JSON.stringify(config, null, 2)); + }); + process.stdout.write(JSON.stringify(state, null, 2)); }; diff --git a/extra/lxd-runner/lib/actions/stop.js b/extra/lxd-runner/lib/actions/stop.js index ec58be592..d5582e1cb 100644 --- a/extra/lxd-runner/lib/actions/stop.js +++ b/extra/lxd-runner/lib/actions/stop.js @@ -1,6 +1,5 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = function({config}) { - return this.lxc.stop({ +export default async function({config}) { + await this.lxc.stop({ $header: 'Container stop', container: `${config.container}` }); diff --git a/extra/lxd-runner/lib/actions/test.js b/extra/lxd-runner/lib/actions/test.js index d26e3faa6..b38b57ce8 100644 --- a/extra/lxd-runner/lib/actions/test.js +++ b/extra/lxd-runner/lib/actions/test.js @@ -1,11 +1,10 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = function({config}) { +export default async function({config}) { // @lxc.exec // container: "#{config.container}" // cwd: "#{config.cwd}" // command: 'npm run test:local' // shell: 'bash -l' - return this.execute({ + await this.execute({ stdout: process.stdout, env: process.env, command: [ diff --git a/extra/lxd-runner/lib/index.js b/extra/lxd-runner/lib/index.js index 61f7797a7..2bf756ab1 100644 --- a/extra/lxd-runner/lib/index.js +++ b/extra/lxd-runner/lib/index.js @@ -1,170 +1,211 @@ -// Generated by CoffeeScript 2.7.0 -var nikita, path, shell; +import path from "node:path"; +import { shell } from "shell"; +import nikita from "@nikitajs/core"; +import "@nikitajs/log/register"; +import "@nikitajs/lxd/register"; -path = require('path'); - -({shell} = require('shell')); - -nikita = require('@nikitajs/core'); - -require('@nikitajs/log/lib/register'); - -require('@nikitajs/lxd/lib/register'); - -module.exports = function(config) { +export default function (config) { return shell({ - name: 'nikita-test-runner', + name: "nikita-test-runner", description: `Execute test inside the LXD environment.`, 
options: { container: { default: `${config.container}`, description: `Name of the container.`, - required: !config.container + required: !config.container, }, cwd: { default: `${config.cwd}`, description: `Absolute path inside the container to use as the working directory.`, - required: !config.cwd + required: !config.cwd, }, debug: { default: false, - type: 'boolean', - description: `Instantiate the Nikita session in debug mode.` + type: "boolean", + description: `Instantiate the Nikita session in debug mode.`, }, logdir: { default: `${config.logdir}`, - description: `Directory were to store the logs.` - } + description: `Directory were to store the logs.`, + }, }, commands: { - 'delete': { + delete: { description: `Delete a container container.`, options: { force: { - type: 'boolean', - shortcut: 'f', - description: `Force the container removal even if it is started.` - } + type: "boolean", + shortcut: "f", + description: `Force the container removal even if it is started.`, + }, }, - handler: function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'delete.md') - }).call('@nikitajs/lxd-runner/lib/actions/delete', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "delete.md"), + }) + .call("@nikitajs/lxd-runner/delete", { + ...config, + ...params, + }); + }, }, - 'enter': { + enter: { description: `Open a prompt running inside the container.`, - handler: function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'enter.md') - }).call('@nikitajs/lxd-runner/lib/actions/enter', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "enter.md"), + }) + .call("@nikitajs/lxd-runner/enter", { + ...config, + ...params, + }); + }, }, - 'exec': { + exec: { description: `Execute a command inside the container console.`, - main: 'cmd', - handler: function({params}) { + main: "cmd", + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'exec.md') - }).call('@nikitajs/lxd-runner/lib/actions/exec', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "exec.md"), + }) + .call("@nikitajs/lxd-runner/exec", { + ...config, + ...params, + }); + }, }, - 'state': { + state: { description: `Print machine state and information.`, - handler: function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'exec.md') - }).call('@nikitajs/lxd-runner/lib/actions/state', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "exec.md"), + }) + .call("@nikitajs/lxd-runner/state", { + ...config, + ...params, + }); + }, }, - 'run': { + run: { description: `Start and stop the container and execute all the tests.`, - handler: 
function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'run.md') - }).call('@nikitajs/lxd-runner/lib/actions/run', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "run.md"), + }) + .call("@nikitajs/lxd-runner/run", { + ...config, + ...params, + }); + }, }, - 'start': { + start: { description: `Start the container.`, - handler: function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'start.md') - }).call('@nikitajs/lxd-runner/lib/actions/start', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "start.md"), + }) + .call("@nikitajs/lxd-runner/start", { + ...config, + ...params, + }); + }, }, - 'stop': { + stop: { description: `Stop the container.`, - handler: function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'stop.md') - }).call('@nikitajs/lxd-runner/lib/actions/stop', {...config, ...params}); - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "stop.md"), + }) + .call("@nikitajs/lxd-runner/stop", { + ...config, + ...params, + }); + }, }, - 'test': { + test: { description: `Execute all the tests, does not start and stop the containers, see \`run\`.`, - handler: function({params}) { + handler: function ({ params }) { return nikita({ - $debug: params.debug - }).log.cli({ - pad: { - host: 20, - header: 60 - } - }).log.md({ - filename: path.resolve(params.logdir, 'test.md') - }).call('@nikitajs/lxd-runner/lib/actions/test', {...config, ...params}); - } - } - } + $debug: params.debug, + }) + .log.cli({ + pad: { + host: 20, + header: 60, + }, + }) + .log.md({ + filename: path.resolve(params.logdir, "test.md"), + }) + .call("@nikitajs/lxd-runner/test", { + ...config, + ...params, + }); + }, + }, + }, }).route(); -}; +} diff --git a/extra/lxd-runner/package.json b/extra/lxd-runner/package.json index 52d5dc4cd..d0f338768 100644 --- a/extra/lxd-runner/package.json +++ b/extra/lxd-runner/package.json @@ -4,9 +4,6 @@ "description": "", "main": "lib/index.js", "private": true, - "scripts": { - "build": "coffee -b -o lib src" - }, "author": "David Worms", "license": "MIT", "dependencies": { @@ -15,5 +12,10 @@ }, "devDependencies": { "coffeescript": "^2.7.0" - } + }, + "exports": { + ".": "./lib/index.js", + "./*": "./lib/actions/*.js" + }, + "type": "module" } diff --git a/extra/lxd-runner/src/actions/delete.coffee b/extra/lxd-runner/src/actions/delete.coffee deleted file mode 100644 index 36c512acc..000000000 --- a/extra/lxd-runner/src/actions/delete.coffee +++ /dev/null @@ -1,6 +0,0 @@ - -module.exports = ({config}) -> - @lxc.delete - $header: 'Container delete' - container: "#{config.container}" - force: config.force diff --git a/extra/lxd-runner/src/actions/enter.coffee b/extra/lxd-runner/src/actions/enter.coffee deleted file mode 100644 index 377041c66..000000000 --- a/extra/lxd-runner/src/actions/enter.coffee +++ /dev/null @@ -1,18 +0,0 @@ 
- -module.exports = ({config}) -> - # Note, using `lxc shell` would be nice but `cwd` doesn't seem right - # `lxc shell --cwd /nikita/packages/$pkg` then `pwd` return `/root` - # `lxc shell --cwd /nikita/packages/$pkg -- pkg` prints: - # `pwd: ignoring non-option arguments` - # `/nikita/packages/$pkg` - @execute - $header: 'Container enter' - command: """ - lxc exec \ - --cwd #{config.cwd} \ - #{config.container} -- bash - """ - stdio: ['inherit', 'inherit', 'inherit'] - stdin: process.stdin - stdout: process.stdout - stderr: process.stderr diff --git a/extra/lxd-runner/src/actions/exec.coffee b/extra/lxd-runner/src/actions/exec.coffee deleted file mode 100644 index f7b48b7b3..000000000 --- a/extra/lxd-runner/src/actions/exec.coffee +++ /dev/null @@ -1,18 +0,0 @@ - -module.exports = ({config}) -> - # Note, using `lxc shell` would be nice but `cwd` doesn't seem right - # `lxc shell --cwd /nikita/packages/$pkg` then `pwd` return `/root` - # `lxc shell --cwd /nikita/packages/$pkg -- pkg` prints: - # `pwd: ignoring non-option arguments` - # `/nikita/packages/$pkg` - @execute - $header: 'Container exec' - command: """ - lxc exec \ - --cwd #{config.cwd} \ - #{config.container} -- #{config.cmd} - """ - stdio: ['inherit', 'inherit', 'inherit'] - stdin: process.stdin - stdout: process.stdout - stderr: process.stderr diff --git a/extra/lxd-runner/src/actions/run.coffee b/extra/lxd-runner/src/actions/run.coffee deleted file mode 100644 index d462ee658..000000000 --- a/extra/lxd-runner/src/actions/run.coffee +++ /dev/null @@ -1,5 +0,0 @@ - -module.exports = ({config}) -> - await @call '@nikitajs/lxd-runner/lib/actions/start', config - await @call '@nikitajs/lxd-runner/lib/actions/test', config - await @call '@nikitajs/lxd-runner/lib/actions/stop', config diff --git a/extra/lxd-runner/src/actions/start.coffee b/extra/lxd-runner/src/actions/start.coffee deleted file mode 100644 index c5a1d03c6..000000000 --- a/extra/lxd-runner/src/actions/start.coffee +++ /dev/null @@ -1,5 +0,0 @@ - -module.exports = ({config}) -> - @lxc.cluster - $header: 'Container start' - , config.cluster diff --git a/extra/lxd-runner/src/actions/state.coffee b/extra/lxd-runner/src/actions/state.coffee deleted file mode 100644 index 1f0cb9675..000000000 --- a/extra/lxd-runner/src/actions/state.coffee +++ /dev/null @@ -1,7 +0,0 @@ - -module.exports = ({config, ...args}) -> - { config } = await @lxc.state - $header: 'Container state' - container: "#{config.container}" - process.stdout.write JSON.stringify config, null, 2 - diff --git a/extra/lxd-runner/src/actions/stop.coffee b/extra/lxd-runner/src/actions/stop.coffee deleted file mode 100644 index 4031d5e89..000000000 --- a/extra/lxd-runner/src/actions/stop.coffee +++ /dev/null @@ -1,5 +0,0 @@ - -module.exports = ({config}) -> - @lxc.stop - $header: 'Container stop' - container: "#{config.container}" diff --git a/extra/lxd-runner/src/actions/test.coffee b/extra/lxd-runner/src/actions/test.coffee deleted file mode 100644 index 406653fc5..000000000 --- a/extra/lxd-runner/src/actions/test.coffee +++ /dev/null @@ -1,18 +0,0 @@ - -module.exports = ({config})-> - # @lxc.exec - # container: "#{config.container}" - # cwd: "#{config.cwd}" - # command: 'npm run test:local' - # shell: 'bash -l' - @execute - stdout: process.stdout - env: process.env - command: [ - 'lxc exec' - "--cwd #{config.cwd}" - # Note, core ssh env log in as "source" user - "--user #{config.test_user}" if config.test_user - "#{config.container} --" - 'bash -l -c "npm run test:local"' - ].join ' ' diff --git 
a/extra/lxd-runner/src/index.coffee b/extra/lxd-runner/src/index.coffee deleted file mode 100644 index f4f5b0b6e..000000000 --- a/extra/lxd-runner/src/index.coffee +++ /dev/null @@ -1,127 +0,0 @@ - -path = require 'path' -{shell} = require 'shell' -nikita = require '@nikitajs/core' -require '@nikitajs/log/lib/register' -require '@nikitajs/lxd/lib/register' - -module.exports = (config) -> - shell - name: 'nikita-test-runner' - description: ''' - Execute test inside the LXD environment. - ''' - options: - container: - default: "#{config.container}" - description: ''' - Name of the container. - ''' - required: !config.container - cwd: - default: "#{config.cwd}" - description: ''' - Absolute path inside the container to use as the working directory. - ''' - required: !config.cwd - debug: - default: false - type: 'boolean' - description: ''' - Instantiate the Nikita session in debug mode. - ''' - logdir: - default: "#{config.logdir}" - description: ''' - Directory were to store the logs. - ''' - commands: - 'delete': - description: ''' - Delete a container container. - ''' - options: - force: - type: 'boolean' - shortcut: 'f' - description: ''' - Force the container removal even if it is started. - ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'delete.md' - .call '@nikitajs/lxd-runner/lib/actions/delete', {...config, ...params} - 'enter': - description: ''' - Open a prompt running inside the container. - ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'enter.md' - .call '@nikitajs/lxd-runner/lib/actions/enter', {...config, ...params} - 'exec': - description: ''' - Execute a command inside the container console. - ''' - main: 'cmd' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'exec.md' - .call '@nikitajs/lxd-runner/lib/actions/exec', {...config, ...params} - 'state': - description: ''' - Print machine state and information. - ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'exec.md' - .call '@nikitajs/lxd-runner/lib/actions/state', {...config, ...params} - 'run': - description: ''' - Start and stop the container and execute all the tests. - ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'run.md' - .call '@nikitajs/lxd-runner/lib/actions/run', {...config, ...params} - 'start': - description: ''' - Start the container. - ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'start.md' - .call '@nikitajs/lxd-runner/lib/actions/start', {...config, ...params} - 'stop': - description: ''' - Stop the container. - ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'stop.md' - .call '@nikitajs/lxd-runner/lib/actions/stop', {...config, ...params} - 'test': - description: ''' - Execute all the tests, does not start and stop the containers, see `run`. 
- ''' - handler: ({params}) -> - nikita - $debug: params.debug - .log.cli pad: host: 20, header: 60 - .log.md filename: path.resolve params.logdir, 'test.md' - .call '@nikitajs/lxd-runner/lib/actions/test', {...config, ...params} - .route() diff --git a/packages/core/README.md b/packages/core/README.md index ab4d25659..7dc71896e 100644 --- a/packages/core/README.md +++ b/packages/core/README.md @@ -38,13 +38,13 @@ Nikita is commonly imported by refering to the `nikita` package. However, it wil To import the module: ```js -const nikita = require('@nikitajs/core'); +import nikita from "@nikitajs/core"; ``` Then, call the `register` scripts to register additionnal actions, for example the `lxd` actions: ```js -require('@nikitajs/lxd/lib/register'); +import "@nikitajs/lxd/lib/register"; ``` Following this example, you can now use any action present in the `core` and the `lxd` packages: diff --git a/packages/core/env/arch_chroot/Dockerfile b/packages/core/env/arch_chroot/Dockerfile index 4799e2549..a4406af9d 100644 --- a/packages/core/env/arch_chroot/Dockerfile +++ b/packages/core/env/arch_chroot/Dockerfile @@ -1,42 +1,41 @@ # Being a rolling release, image tags are based on dates to match the bootstrap package downloaded below FROM archlinux:latest -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " -RUN \ - pacman --noconfirm -Syu \ - && pacman --noconfirm -S procps grep which sed zip git +RUN pacman --noconfirm -Syu && \ + pacman --noconfirm -S procps grep which sed zip git # Install Node.js RUN pacman --noconfirm -S nodejs npm # Install SSH and sudo -RUN pacman --noconfirm -S openssh sudo \ - && /usr/bin/ssh-keygen -A +RUN pacman --noconfirm -S openssh sudo && \ + /usr/bin/ssh-keygen -A # Install arch-chroot # Get the latest version from # https://mirrors.edge.kernel.org/archlinux/iso/ ENV ARCHLINUX_VERSION 2021.11.01 -RUN pacman --noconfirm -S arch-install-scripts tar gzip \ - && ARCHLINUX_VERSION=`curl -s https://mirrors.edge.kernel.org/archlinux/iso/ | grep -o 'href=".*">' | sed -e "s/href=\"//g" | sed -e 's/\/">//g' | grep -v latest | tail -1` \ - && curl -L "https://mirrors.edge.kernel.org/archlinux/iso/$ARCHLINUX_VERSION/archlinux-bootstrap-x86_64.tar.gz" -o /var/tmp/archlinux-bootstrap.tar.gz \ - # && curl -L "https://mirrors.edge.kernel.org/archlinux/iso/$ARCHLINUX_VERSION/archlinux-bootstrap-$ARCHLINUX_VERSION-x86_64.tar.gz" -o /var/tmp/archlinux-bootstrap.tar.gz \ - && tar xzf /var/tmp/archlinux-bootstrap.tar.gz -C /var/tmp \ - && rm -f /var/tmp/archlinux-bootstrap.tar.gz +RUN pacman --noconfirm -S arch-install-scripts tar gzip && \ + ARCHLINUX_VERSION=`curl -s https://mirrors.edge.kernel.org/archlinux/iso/ | grep -o 'href=".*">' | sed -e "s/href=\"//g" | sed -e 's/\/">//g' | grep -v latest | tail -1` && \ + curl -L "https://mirrors.edge.kernel.org/archlinux/iso/$ARCHLINUX_VERSION/archlinux-bootstrap-x86_64.tar.gz" -o /var/tmp/archlinux-bootstrap.tar.gz && \ + # && curl -L "https://mirrors.edge.kernel.org/archlinux/iso/$ARCHLINUX_VERSION/archlinux-bootstrap-$ARCHLINUX_VERSION-x86_64.tar.gz" -o /var/tmp/archlinux-bootstrap.tar.gz \ + tar xzf /var/tmp/archlinux-bootstrap.tar.gz -C /var/tmp && \ + rm -f /var/tmp/archlinux-bootstrap.tar.gz ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/core # Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d 
/home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/core/env/arch_chroot/docker-compose.yml b/packages/core/env/arch_chroot/docker-compose.yml index 0c19ca631..7c42a5a54 100644 --- a/packages/core/env/arch_chroot/docker-compose.yml +++ b/packages/core/env/arch_chroot/docker-compose.yml @@ -2,12 +2,19 @@ services: nodejs: build: . - image: nikita_core_arch_chroot + cap_add: + - SYS_ADMIN # Required to use `mount` container_name: nikita_core_arch_chroot_nodejs + environment: + NIKITA_TEST_MODULE: /nikita/packages/core/env/arch_chroot/test.coffee + image: nikita_core_arch_chroot + networks: + - nikita platform: linux/amd64 # Required on Apple M1 volumes: - ../../../../:/nikita - environment: - NIKITA_TEST_MODULE: /nikita/packages/core/env/arch_chroot/test.coffee - cap_add: - - SYS_ADMIN # Required to use `mount` + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/core/env/arch_chroot/test.coffee b/packages/core/env/arch_chroot/test.coffee index 7ba66c6f4..ed808e2a5 100644 --- a/packages/core/env/arch_chroot/test.coffee +++ b/packages/core/env/arch_chroot/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: conditions_if_os: true system_execute_arc_chroot: true diff --git a/packages/core/env/centos7/Dockerfile b/packages/core/env/centos7/Dockerfile index a5a78edf1..ae79418f9 100644 --- a/packages/core/env/centos7/Dockerfile +++ b/packages/core/env/centos7/Dockerfile @@ -1,12 +1,12 @@ FROM centos:7.9.2009 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ - # Install Node dependencies - yum install -y git make \ - # Install SSH and sudo - && yum install -y openssh-server openssh-clients sudo \ - && ssh-keygen -A + # Install Node dependencies + yum install -y git make && \ + # Install SSH and sudo + yum install -y openssh-server openssh-clients sudo && \ + ssh-keygen -A RUN yum clean all @@ -15,20 +15,19 @@ RUN mkdir -p /nikita WORKDIR /nikita/packages/core # Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d /home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js # Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.20.1 +RUN curl -L https://git.io/n-install | bash -s -- -y 16.20.1 ENV PATH /home/nikita/n/bin:$PATH -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N ''\ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/core/env/centos7/docker-compose.yml b/packages/core/env/centos7/docker-compose.yml index 5a7744e5f..c476a5133 100644 --- a/packages/core/env/centos7/docker-compose.yml +++ b/packages/core/env/centos7/docker-compose.yml @@ -2,9 +2,16 @@ services: nodejs: 
build: . - image: nikita_core_centos7 container_name: nikita_core_centos7_nodejs - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/core/env/centos7/test.coffee + image: nikita_core_centos7 + networks: + - nikita + volumes: + - ../../../../:/nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/core/env/centos7/test.coffee b/packages/core/env/centos7/test.coffee index 9d64562e5..ab92095f1 100644 --- a/packages/core/env/centos7/test.coffee +++ b/packages/core/env/centos7/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: conditions_if_os: true conditions_if_os: diff --git a/packages/core/env/chown/index.coffee b/packages/core/env/chown/index.coffee deleted file mode 100644 index 2ba04abe9..000000000 --- a/packages/core/env/chown/index.coffee +++ /dev/null @@ -1,48 +0,0 @@ - -path = require 'path' -runner = require '@nikitajs/lxd-runner' - -runner - cwd: '/nikita/packages/core' - container: 'nikita-core-chown' - logdir: path.resolve __dirname, './logs' - cluster: - containers: - 'nikita-core-chown': - image: 'images:almalinux/8' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/core/env/chown/test.coffee' - 'raw.idmap': if parseInt(process.env['NIKITA_LXD_IN_VAGRANT']) - then 'both 1000 0' - else "uid #{process.getuid()} 0\ngid #{process.getgid()} 0" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Node.js' - container: config.container - command: ''' - if command -v node ; then exit 42; fi - yum install -y tar - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 16 - ''' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'SSH keys' - container: config.container - command: """ - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! -f /root/.ssh/id_ed25519 ]; then - ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' - cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys - fi - """ - trap: true -.catch (err) -> - console.error err diff --git a/packages/core/env/chown/index.js b/packages/core/env/chown/index.js new file mode 100644 index 000000000..6101f044d --- /dev/null +++ b/packages/core/env/chown/index.js @@ -0,0 +1,58 @@ + +import path from 'node:path'; +import dedent from 'dedent'; +import runner from '@nikitajs/lxd-runner'; +const __dirname = new URL( '.', import.meta.url).pathname + +runner({ + cwd: '/nikita/packages/core', + container: 'nikita-core-chown', + logdir: path.resolve(__dirname, './logs'), + cluster: { + containers: { + 'nikita-core-chown': { + image: 'images:almalinux/8', + properties: { + 'environment.NIKITA_TEST_MODULE': '/nikita/packages/core/env/chown/test.coffee', + 'raw.idmap': parseInt(process.env['NIKITA_LXD_IN_VAGRANT']) ? 'both 1000 0' : `uid ${process.getuid()} 0\ngid ${process.getgid()} 0` + }, + disk: { + nikitadir: { + path: '/nikita', + source: process.env['NIKITA_HOME'] || path.join(__dirname, '../../../../') + } + }, + ssh: { + enabled: true + } + } + }, + provision_container: async function({config}) { + await this.lxc.exec({ + $header: 'Node.js', + container: config.container, + command: dedent` + if command -v node ; then exit 42; fi + yum install -y tar + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . 
~/.bashrc + nvm install 16 + `, + trap: true, + code: [0, 42] + }); + await this.lxc.exec({ + $header: 'SSH keys', + container: config.container, + command: dedent` + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! -f /root/.ssh/id_ed25519 ]; then + ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' + cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys + fi + `, + trap: true + }); + } + } +}); diff --git a/packages/core/env/chown/test.coffee b/packages/core/env/chown/test.coffee index 85e8ffc0c..614f3a37f 100644 --- a/packages/core/env/chown/test.coffee +++ b/packages/core/env/chown/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: chown: true config: [ diff --git a/packages/core/env/run.sh b/packages/core/env/run.sh index 62c056875..a2d9ccbb0 100755 --- a/packages/core/env/run.sh +++ b/packages/core/env/run.sh @@ -5,8 +5,8 @@ cd `pwd`/`dirname ${BASH_SOURCE}` ./arch_chroot/run.sh ./centos7/run.sh -npx coffee env/chown/index.coffee run -npx coffee env/ssh/index.coffee run +node ./chown/index.js run +node ./ssh/index.js run ./sudo/run.sh ./ubuntu-14.04/run.sh ./ubuntu-22.04/run.sh diff --git a/packages/core/env/ssh/index.coffee b/packages/core/env/ssh/index.coffee deleted file mode 100644 index ed978a282..000000000 --- a/packages/core/env/ssh/index.coffee +++ /dev/null @@ -1,76 +0,0 @@ - -path = require 'path' -runner = require '@nikitajs/lxd-runner' - -runner - cwd: '/nikita/packages/core' - container: 'nikita-core-ssh' - logdir: path.resolve __dirname, './logs' - test_user: 1234 - cluster: - containers: - 'nikita-core-ssh': - image: 'images:almalinux/8' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/core/env/ssh/test.coffee' - 'environment.HOME': '/home/source' # Fix, LXD doesnt set HOME with --user - 'raw.idmap': if process.env['NIKITA_LXD_IN_VAGRANT'] - then 'both 1000 1234' - else "both #{process.getuid()} 1234" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Dependencies' - container: config.container - command: ''' - # nvm require the tar commands - yum install -y tar - ''' - await @lxc.exec - $header: 'User `source`' - container: config.container - command: ''' - if ! id -u 1234 ; then - useradd -m -s /bin/bash -u 1234 source - fi - mkdir -p /home/source/.ssh && chmod 700 /home/source/.ssh - if [ ! -f /home/source/.ssh/id_rsa ]; then - ssh-keygen -t rsa -f /home/source/.ssh/id_rsa -N '' - fi - chown -R source /home/source/ - ''' - trap: true - await @lxc.exec - $header: 'Node.js' - container: config.container - command: """ - if command -v node ; then exit 42; fi - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 16 - """ - user: '1234' - shell: 'bash' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'User `target`' - container: config.container - command: ''' - if ! id -u 1235; then - useradd -m -s /bin/bash -u 1235 target - fi - mkdir -p /home/target/.ssh && chmod 700 /home/target/.ssh - pubkey=`cat /home/source/.ssh/id_rsa.pub` - if ! 
cat /home/target/.ssh/authorized_keys | grep $pubkey; then - echo $pubkey > /home/target/.ssh/authorized_keys - fi - chown -R target /home/target/ - ''' - trap: true -.catch (err) -> - console.error err diff --git a/packages/core/env/ssh/index.js b/packages/core/env/ssh/index.js new file mode 100644 index 000000000..537b09352 --- /dev/null +++ b/packages/core/env/ssh/index.js @@ -0,0 +1,88 @@ + +import path from 'node:path'; +import dedent from 'dedent'; +import runner from '@nikitajs/lxd-runner'; +const __dirname = new URL( '.', import.meta.url).pathname + +runner({ + cwd: '/nikita/packages/core', + container: 'nikita-core-ssh', + logdir: path.resolve(__dirname, './logs'), + test_user: 1234, + cluster: { + containers: { + 'nikita-core-ssh': { + image: 'images:almalinux/8', + properties: { + 'environment.NIKITA_TEST_MODULE': '/nikita/packages/core/env/ssh/test.coffee', + 'environment.HOME': '/home/source', // Fix, LXD doesnt set HOME with --user + 'raw.idmap': process.env['NIKITA_LXD_IN_VAGRANT'] ? 'both 1000 1234' : `both ${process.getuid()} 1234` + }, + disk: { + nikitadir: { + path: '/nikita', + source: process.env['NIKITA_HOME'] || path.join(__dirname, '../../../../') + } + }, + ssh: { + enabled: true + } + } + }, + provision_container: async function({config}) { + await this.lxc.exec({ + $header: 'Dependencies', + container: config.container, + command: dedent` + # nvm require the tar commands + yum install -y tar + ` + }); + await this.lxc.exec({ + $header: 'User `source`', + container: config.container, + command: dedent` + if ! id -u 1234 ; then + useradd -m -s /bin/bash -u 1234 source + fi + mkdir -p /home/source/.ssh && chmod 700 /home/source/.ssh + if [ ! -f /home/source/.ssh/id_rsa ]; then + ssh-keygen -t rsa -f /home/source/.ssh/id_rsa -N '' + fi + chown -R source /home/source/ + `, + trap: true + }); + await this.lxc.exec({ + $header: 'Node.js', + container: config.container, + command: dedent` + if command -v node ; then exit 42; fi + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . ~/.bashrc + nvm install 16 + `, + user: '1234', + shell: 'bash', + trap: true, + code: [0, 42] + }); + await this.lxc.exec({ + $header: 'User `target`', + container: config.container, + command: dedent` + if ! id -u 1235; then + useradd -m -s /bin/bash -u 1235 target + fi + mkdir -p /home/target/.ssh && chmod 700 /home/target/.ssh + pubkey=\`cat /home/source/.ssh/id_rsa.pub\` + if ! cat /home/target/.ssh/authorized_keys | grep $pubkey; then + echo $pubkey > /home/target/.ssh/authorized_keys + fi + chown -R target /home/target/ + `, + trap: true + }); + } + } +}); diff --git a/packages/core/env/ssh/test.coffee b/packages/core/env/ssh/test.coffee index a1a6e823b..47558edc6 100644 --- a/packages/core/env/ssh/test.coffee +++ b/packages/core/env/ssh/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: ssh: true config: [ diff --git a/packages/core/env/sudo/Dockerfile b/packages/core/env/sudo/Dockerfile index be2064b26..4b2709121 100644 --- a/packages/core/env/sudo/Dockerfile +++ b/packages/core/env/sudo/Dockerfile @@ -1,5 +1,5 @@ FROM centos:7.9.2009 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ # Install Node dependencies diff --git a/packages/core/env/sudo/docker-compose.yml b/packages/core/env/sudo/docker-compose.yml index 1458a1172..4f0595ab4 100644 --- a/packages/core/env/sudo/docker-compose.yml +++ b/packages/core/env/sudo/docker-compose.yml @@ -2,9 +2,16 @@ services: nodejs: build: . 
- image: nikita_core_sudo container_name: nikita_core_sudo_nodejs - volumes: - - ../../../../:/home/nikita/work environment: NIKITA_TEST_MODULE: /home/nikita/work/packages/core/env/sudo/test.coffee + image: nikita_core_sudo + networks: + - nikita + volumes: + - ../../../../:/home/nikita/work + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/core/env/sudo/test.coffee b/packages/core/env/sudo/test.coffee index 9f169cd88..d711cf919 100644 --- a/packages/core/env/sudo/test.coffee +++ b/packages/core/env/sudo/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: sudo: true service: diff --git a/packages/core/env/ubuntu-14.04/Dockerfile b/packages/core/env/ubuntu-14.04/Dockerfile index c368437b9..dc188864f 100644 --- a/packages/core/env/ubuntu-14.04/Dockerfile +++ b/packages/core/env/ubuntu-14.04/Dockerfile @@ -1,14 +1,13 @@ FROM ubuntu:trusty -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " -RUN \ - apt update -y && \ - # Install Node.js dependencies - apt install -y build-essential curl git && \ - # Install SSH and sudo - apt-get install -y openssh-server sudo && \ - ssh-keygen -A && \ - mkdir -p /run/sshd +RUN apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita @@ -16,20 +15,18 @@ WORKDIR /nikita/packages/core # Sudo User RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js # Note, Ubuntu 14.04 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 +RUN curl -L https://git.io/n-install | bash -s -- -y 16.19 ENV PATH /home/nikita/n/bin:$PATH -RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/core/env/ubuntu-14.04/docker-compose.yml b/packages/core/env/ubuntu-14.04/docker-compose.yml index 57e5ac016..fe97d8f38 100644 --- a/packages/core/env/ubuntu-14.04/docker-compose.yml +++ b/packages/core/env/ubuntu-14.04/docker-compose.yml @@ -2,11 +2,18 @@ services: nodejs: build: . 
- image: nikita_core_ubuntu_1404 container_name: nikita_core_ubuntu_1404_nodejs - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/core/env/ubuntu-14.04/test.coffee DEBUG: 1 # DEBUG: 0 + image: nikita_core_ubuntu_1404 + networks: + - nikita + volumes: + - ../../../../:/nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/core/env/ubuntu-14.04/test.coffee b/packages/core/env/ubuntu-14.04/test.coffee index c86398d4b..6f256b7f3 100644 --- a/packages/core/env/ubuntu-14.04/test.coffee +++ b/packages/core/env/ubuntu-14.04/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: conditions_if_os: true conditions_if_os: diff --git a/packages/core/env/ubuntu-22.04/Dockerfile b/packages/core/env/ubuntu-22.04/Dockerfile index 67578f228..c3241f7de 100644 --- a/packages/core/env/ubuntu-22.04/Dockerfile +++ b/packages/core/env/ubuntu-22.04/Dockerfile @@ -1,11 +1,18 @@ FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " RUN apt update -y && \ - # Install Node.js dependencies - apt install -y build-essential curl git && \ - DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata && \ - # Install SSH and sudo - apt-get install -y openssh-server sudo && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo && \ + # SSH configuration ssh-keygen -A && \ mkdir -p /run/sshd @@ -13,7 +20,7 @@ ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/core -# Sudo User +# User as sudoer RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ mkdir -p /home/nikita && \ chown nikita /home/nikita && \ @@ -21,12 +28,13 @@ RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -# Install Node.js +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + +# Node.js ENV NODE_VERSION stable RUN curl -L https://git.io/n-install | bash -s -- -y ENV PATH /home/nikita/n/bin:$PATH -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys - ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/core/env/ubuntu-22.04/docker-compose.yml b/packages/core/env/ubuntu-22.04/docker-compose.yml index 903374af0..9b094c5f4 100644 --- a/packages/core/env/ubuntu-22.04/docker-compose.yml +++ b/packages/core/env/ubuntu-22.04/docker-compose.yml @@ -2,11 +2,18 @@ services: nodejs: build: . 
- image: nikita_core_ubuntu_2204 container_name: nikita_core_ubuntu_2204_nodejs - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/core/env/ubuntu-22.04/test.coffee DEBUG: 1 # DEBUG: 0 + image: nikita_core_ubuntu_2204 + networks: + - nikita + volumes: + - ../../../../:/nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/core/env/ubuntu-22.04/test.coffee b/packages/core/env/ubuntu-22.04/test.coffee index 20f7e0d5d..44fed2478 100644 --- a/packages/core/env/ubuntu-22.04/test.coffee +++ b/packages/core/env/ubuntu-22.04/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: conditions_if_os: true conditions_if_os: diff --git a/packages/core/lib/actions/assert/index.js b/packages/core/lib/actions/assert/index.js index 68ee526bc..30858281d 100644 --- a/packages/core/lib/actions/assert/index.js +++ b/packages/core/lib/actions/assert/index.js @@ -1,10 +1,10 @@ // Dependencies -const definitions = require('./schema.json'); -const utils = require('../../utils'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { hooks: { on_action: function(action) { action.handler = ((handler) => diff --git a/packages/core/lib/actions/call/README.md b/packages/core/lib/actions/call/README.md index 49918839e..274b37c8a 100644 --- a/packages/core/lib/actions/call/README.md +++ b/packages/core/lib/actions/call/README.md @@ -41,7 +41,7 @@ assert(key === 'value') ```js const value = await nikita(function(){ await this.fs.base.writeFile({ - content: 'module.exports = ({config}) => "my secret"', + content: 'export default ({config}) => "my secret"', target: '/tmp/my_module' }) return this.call( '/tmp/my_module' ) diff --git a/packages/core/lib/actions/call/index.js b/packages/core/lib/actions/call/index.js index c01f9c0ef..853c958cd 100644 --- a/packages/core/lib/actions/call/index.js +++ b/packages/core/lib/actions/call/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const {mutate} = require('mixme'); +import path from 'node:path'; +import {mutate} from 'mixme'; // Action -module.exports = { +export default { hooks: { on_action: async function(action) { if (typeof action.metadata.argument !== 'string') { @@ -16,7 +16,7 @@ module.exports = { if (mod.startsWith('.')) { mod = path.resolve(process.cwd(), mod); } - mod = await require.main.require(mod); + mod = (await import(mod)).default; // The loaded action can have its own interpretation of an argument. // In order to avoid any conflict, we simply remove the // `action.metadata.argument` property. 
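The `call` hunk above swaps `require.main.require(mod)` for a dynamic `import()` that resolves relative paths against the working directory and unwraps the module's `default` export. Below is a minimal standalone sketch of that pattern, assuming a plain Node.js ESM context; the `loadAction` helper name and the example path are illustrative and not part of the patch, and the `pathToFileURL` conversion is an extra portability safeguard that the patch itself does not use.

```js
import path from "node:path";
import { pathToFileURL } from "node:url";

// Resolve a module reference the way the patched `call` action does:
// relative paths are resolved against the current working directory,
// the module is loaded with a dynamic import, and its default export
// (the action object or handler declared with `export default`) is returned.
const loadAction = async (mod) => {
  if (mod.startsWith(".")) {
    // Converting the absolute path to a file URL keeps the dynamic import
    // portable; the patch passes the resolved path directly, which also works.
    mod = pathToFileURL(path.resolve(process.cwd(), mod)).href;
  }
  return (await import(mod)).default;
};

// Usage (illustrative path):
// const action = await loadAction("./lib/actions/my_action.js");
```

Bare specifiers such as `@nikitajs/lxd-runner/start` go through Node.js package resolution instead, which is why the shortened action names used in `run.js` rely on the `exports` map added to `extra/lxd-runner/package.json` earlier in this patch.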
diff --git a/packages/core/lib/actions/execute/assert/index.js b/packages/core/lib/actions/execute/assert/index.js index 4894178db..0422c4e85 100644 --- a/packages/core/lib/actions/execute/assert/index.js +++ b/packages/core/lib/actions/execute/assert/index.js @@ -1,11 +1,11 @@ // Dependencies -const utils = require('../../../utils'); -const definitions = require('./schema.json') +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (Buffer.isBuffer(config.content)) { config.content = config.content.toString(); @@ -46,7 +46,7 @@ module.exports = { } // Content is a regexp if ((config.content != null) && utils.regexp.is(config.content)) { - ({stdout} = (await this.execute(config))); + let {stdout} = await this.execute(config); if (config.trim) { stdout = stdout.trim(); } diff --git a/packages/core/lib/actions/execute/assert/schema.json b/packages/core/lib/actions/execute/assert/schema.json index b42eeff2f..321ab8d08 100644 --- a/packages/core/lib/actions/execute/assert/schema.json +++ b/packages/core/lib/actions/execute/assert/schema.json @@ -28,7 +28,7 @@ "description": "The error message to throw if assert failed." }, "not": { - "$ref": "module://@nikitajs/core/lib/actions/assert#/definitions/config/properties/not" + "$ref": "module://@nikitajs/core/actions/assert#/definitions/config/properties/not" }, "trim": { "type": "boolean", diff --git a/packages/core/lib/actions/execute/index.js b/packages/core/lib/actions/execute/index.js index 4172e281a..30c7dd76a 100644 --- a/packages/core/lib/actions/execute/index.js +++ b/packages/core/lib/actions/execute/index.js @@ -1,9 +1,10 @@ // Dependencies -const exec = require('ssh2-exec'); -const execProm = require('ssh2-exec/promise'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import exec from 'ssh2-exec'; +import execPromise from 'ssh2-exec/promises'; +import utils from '@nikitajs/core/utils'; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Errors const errors = { @@ -17,7 +18,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({ config, metadata, @@ -25,9 +26,7 @@ module.exports = { ssh }) { // Validate parameters - if (config.mode == null) { - config.mode = 0o500; - } + config.mode ??= 0o500; if (typeof config.command === 'function') { config.command = (await this.call(config, config.command)); } @@ -41,7 +40,6 @@ module.exports = { config.command = `set -e\n${config.command}`; } config.command_original = `${config.command}`; - // sudo = await find ({config: {sudo}}) -> sudo const dry = (await find(function({ config: {dry} }) { @@ -64,24 +62,22 @@ module.exports = { const results = []; for (const k in config.env) { const v = config.env[k]; - results.push(`export ${k}=${utils.string.escapeshellarg(v)}\n`); + results.push(`export ${k}=${esa(v)}\n`); } return results; })() ).join('\n'); - const env_export_hash = utils.string.hash(env_export_content); } // Guess current username - const current_username = utils.os.whoami(ssh); + const current_username = utils.os.whoami({ssh}); // Sudo if (config.sudo) { if (current_username === 'root') { config.sudo = false; } else { - if (!['bash', 'arch_chroot'].some(function(k) { - return config[k]; - })) { - config.bash = 'bash'; + // Sudo commands are executed as a bash script unless arch_chroout 
is enabled + if (!["bash", "arch_chroot"].some((k) => config[k])) { + config.bash = "bash"; } } } @@ -105,9 +101,9 @@ module.exports = { level: 'INFO' }); await this.fs.base.writeFile({ - $sudo: config.sudo, + $sudo: config.sudo, // Is it really necessary ? content: env_export_content, - mode: 0o500, + mode: config.mode, target: env_export_target, uid: config.uid }); @@ -130,7 +126,7 @@ module.exports = { config.command = `sudo ${config.command}`; } await this.fs.base.writeFile({ - $sudo: config.sudo, + $sudo: config.sudo, // Is it really necessary ? target: `${target}`, content: `${command}`, mode: config.mode @@ -139,10 +135,7 @@ module.exports = { } else if (config.bash) { const command = config.command; const target = path.join(metadata.tmpdir, `execute-bash-${utils.string.hash(config.command)}`); - log({ - message: `Writing bash script to ${JSON.stringify(target)}`, - level: 'INFO' - }); + log('INFO', `Writing bash script to ${JSON.stringify(target)}`); let cmd = `${config.bash} ${target}`; if (config.uid) { cmd = `su - ${config.uid} -c '${cmd}'`; @@ -165,7 +158,7 @@ module.exports = { // # Note, rm cannot be remove with arch_chroot enabled // config.command += " && code=`echo $?`; rm '#{target}'; exit $code" unless config.dirty await this.fs.base.writeFile({ - $sudo: config.sudo, + $sudo: config.sudo, // Is it really necessary ? content: command, mode: config.mode, target: target, @@ -212,7 +205,7 @@ module.exports = { end: false }); } - let stdout_stream_open = stderr_stream_open = false; + let stdout_stream_open = false; if (child.stdout && (config.stdout_return || config.stdout_log)) { child.stdout.on('data', function(data) { if (config.stdout_log) { @@ -233,6 +226,7 @@ module.exports = { } }); } + let stderr_stream_open = false; if (child.stderr && (config.stderr_return || config.stderr_log)) { child.stderr.on('data', function(data) { if (config.stderr_log) { @@ -253,11 +247,11 @@ module.exports = { } }); } + let exitCalled = false; return child.on("exit", function(code) { - log({ - message: `Command exit with status: ${code}`, - level: 'DEBUG' - }); + if (exitCalled) return; + exitCalled = true; + log('DEBUG', `Command exit with status: ${code}`); result.code = code; // Give it some time because the "exit" event is sometimes called // before the "stdout" "data" event when running `npm test` @@ -336,8 +330,8 @@ module.exports = { }, hooks: { on_action: { - after: ['@nikitajs/core/lib/plugins/execute', '@nikitajs/core/lib/plugins/ssh', '@nikitajs/core/lib/plugins/tools/path'], - before: ['@nikitajs/core/lib/plugins/metadata/schema', '@nikitajs/core/lib/plugins/metadata/tmpdir'], + after: ['@nikitajs/core/plugins/execute', '@nikitajs/core/plugins/ssh', '@nikitajs/core/plugins/tools/path'], + before: ['@nikitajs/core/plugins/metadata/schema', '@nikitajs/core/plugins/metadata/tmpdir'], handler: function({ config, metadata, @@ -356,7 +350,7 @@ module.exports = { config.arch_chroot_tmpdir = path.join('/opt', tmpdir); tmpdir = path.join(config.arch_chroot_rootdir, config.arch_chroot_tmpdir); const sudo = function(command) { - if (utils.os.whoami(ssh) === 'root') { + if (utils.os.whoami({ssh}) === 'root') { return command; } else { return `sudo ${command}`; @@ -364,7 +358,7 @@ module.exports = { }; const command = ['set -e', sudo(`[ -w ${config.arch_chroot_rootdir} ] || exit 2;`), sudo(`mkdir -p ${tmpdir};`), sudo(`chmod 700 ${tmpdir};`)].join('\n'); try { - await execProm(ssh, command); + await execPromise(ssh, command); } catch (error) { if (error.code === 2) { throw 
errors.NIKITA_EXECUTE_ARCH_CHROOT_ROOTDIR_NOT_EXIST({ @@ -384,7 +378,7 @@ module.exports = { } }, on_result: { - before: '@nikitajs/core/lib/plugins/ssh', + before: '@nikitajs/core/plugins/ssh', handler: async function({ action: {config, metadata, ssh} }) { @@ -398,14 +392,14 @@ module.exports = { return; } const sudo = function(command) { - if (utils.os.whoami(ssh) === 'root') { + if (utils.os.whoami({ssh}) === 'root') { return command; } else { return `sudo ${command}`; } }; const command = [sudo(`rm -rf ${metadata.tmpdir}`)].join('\n'); - return (await execProm(ssh, command)); + return (await execPromise(ssh, command)); } } }, diff --git a/packages/core/lib/actions/execute/wait/index.js b/packages/core/lib/actions/execute/wait/index.js index 83cd5351f..5961c5be6 100644 --- a/packages/core/lib/actions/execute/wait/index.js +++ b/packages/core/lib/actions/execute/wait/index.js @@ -1,10 +1,10 @@ // ## Dependencies -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { let attempts = 0; const wait = function (timeout) { diff --git a/packages/core/lib/actions/execute/wait/schema.json b/packages/core/lib/actions/execute/wait/schema.json index a9b6c1db6..a58c4955e 100644 --- a/packages/core/lib/actions/execute/wait/schema.json +++ b/packages/core/lib/actions/execute/wait/schema.json @@ -22,7 +22,7 @@ "description": "Time interval in milliseconds between which we should wait before re-executing the command, default to 2s." }, "code": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/code", + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/code", "default": {} }, "retry": { @@ -31,13 +31,13 @@ "description": "Maximum number of attempts." 
}, "stdin_log": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/stdin_log" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/stdin_log" }, "stdout_log": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/stdout_log" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/stdout_log" }, "stderr_log": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/stderr_log" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/stderr_log" } }, "required": [ diff --git a/packages/core/lib/actions/fs/assert/index.js b/packages/core/lib/actions/fs/assert/index.js index 704ab15d1..7e410c0c7 100644 --- a/packages/core/lib/actions/fs/assert/index.js +++ b/packages/core/lib/actions/fs/assert/index.js @@ -1,11 +1,8 @@ // Dependencies -const pad = require('pad'); -const fs = require('fs'); -const utils = require('../../../utils'); -const definitions = require('./schema.json'); - -// TODO: remove indexOf usage -const indexOf = [].indexOf; +import pad from 'pad'; +import fs from 'fs'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_ASSERT_FILE_MISSING: function({config}) { @@ -107,11 +104,11 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({config}) { // Cached version of `nikita.fs.base.lstat` const cache = {} - lstat = async (location) => { + const lstat = async (location) => { if (cache[location] != null) return cache[location]; return cache[location] = await this.fs.base.lstat(config.target) } diff --git a/packages/core/lib/actions/fs/assert/schema.json b/packages/core/lib/actions/fs/assert/schema.json index 1b302c3d1..09ea1bde9 100644 --- a/packages/core/lib/actions/fs/assert/schema.json +++ b/packages/core/lib/actions/fs/assert/schema.json @@ -54,12 +54,12 @@ "mode": { "type": "array", "items": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" }, "description": "Validate file permissions." }, "not": { - "$ref": "module://@nikitajs/core/lib/actions/assert#/definitions/config/properties/not" + "$ref": "module://@nikitajs/core/actions/assert#/definitions/config/properties/not" }, "sha1": { "type": "string", diff --git a/packages/core/lib/actions/fs/base/chmod/index.js b/packages/core/lib/actions/fs/base/chmod/index.js index faf611fca..f1043122a 100644 --- a/packages/core/lib/actions/fs/base/chmod/index.js +++ b/packages/core/lib/actions/fs/base/chmod/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const mode = typeof config.mode === 'number' ? 
config.mode.toString(8).substr(-4) diff --git a/packages/core/lib/actions/fs/base/chown/index.js b/packages/core/lib/actions/fs/base/chown/index.js index c8f36f7c0..ca96c6e6f 100644 --- a/packages/core/lib/actions/fs/base/chown/index.js +++ b/packages/core/lib/actions/fs/base/chown/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Normalization if (config.uid === false) { diff --git a/packages/core/lib/actions/fs/base/copy/index.js b/packages/core/lib/actions/fs/base/copy/index.js index 1cc357e34..e4aa431f4 100644 --- a/packages/core/lib/actions/fs/base/copy/index.js +++ b/packages/core/lib/actions/fs/base/copy/index.js @@ -1,8 +1,8 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import dedent from 'dedent'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_COPY_TARGET_ENOENT: ({config, error}) => @@ -15,7 +15,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({config}) { try { return await this.execute(dedent` diff --git a/packages/core/lib/actions/fs/base/createReadStream/index.js b/packages/core/lib/actions/fs/base/createReadStream/index.js index f1e69fa02..688ba07c9 100644 --- a/packages/core/lib/actions/fs/base/createReadStream/index.js +++ b/packages/core/lib/actions/fs/base/createReadStream/index.js @@ -1,9 +1,9 @@ // Dependencies -const fs = require('ssh2-fs'); -const exec = require('ssh2-exec/promise'); -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import fs from 'ssh2-fs'; +import exec from 'ssh2-exec/promises'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_CRS_NO_EVENT_HANDLER: () => @@ -35,7 +35,7 @@ const errors = { }; // ## Exports -module.exports = { +export default { handler: async function({ config, metadata, @@ -124,11 +124,11 @@ module.exports = { hooks: { on_action: { after: [ - '@nikitajs/core/lib/plugins/execute' + '@nikitajs/core/plugins/execute' ], before: [ - '@nikitajs/core/lib/plugins/metadata/schema', - '@nikitajs/core/lib/plugins/metadata/tmpdir' + '@nikitajs/core/plugins/metadata/schema', + '@nikitajs/core/plugins/metadata/tmpdir' ], handler: async function({ config, diff --git a/packages/core/lib/actions/fs/base/createWriteStream/index.js b/packages/core/lib/actions/fs/base/createWriteStream/index.js index 388f69bbb..b74b02daa 100644 --- a/packages/core/lib/actions/fs/base/createWriteStream/index.js +++ b/packages/core/lib/actions/fs/base/createWriteStream/index.js @@ -1,11 +1,11 @@ // Dependencies -const fs = require('ssh2-fs'); -const exec = require('ssh2-exec/promise'); -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import fs from 'ssh2-fs'; +import exec from 'ssh2-exec/promises'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; -// ## Errors +// Errors const errors = { NIKITA_FS_CWS_TARGET_ENOENT: ({config}) => utils.error('NIKITA_FS_CWS_TARGET_ENOENT', [ @@ -21,7 +21,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({ config, metadata, @@ -40,36 +40,30 @@ 
module.exports = { // config.mode ?= 0o644 # Node.js default to 0o666 // In append mode, we write to a copy of the target file located in a temporary location if (config.flags[0] === 'a') { - const whoami = utils.os.whoami(ssh); + const whoami = utils.os.whoami({ssh}); await exec(ssh, [ sudo(`[ ! -f '${config.target}' ] && exit`), sudo(`cp '${config.target}' '${config.target_tmp}'`), sudo(`chown ${whoami} '${config.target_tmp}'`)].join('\n') ); - log({ - message: "Append prepared by placing a copy of the original file in a temporary path", - level: 'INFO' - }); + log('INFO', "Append prepared by placing a copy of the original file in a temporary path"); } } catch (error) { - log({ - message: "Failed to place original file in temporary path", - level: 'ERROR' - }); + log('ERROR', "Failed to place original file in temporary path"); throw error; } // Start writing the content - log({ - message: 'Start writing bytes', - level: 'DEBUG' - }); + log('DEBUG', 'Start writing bytes'); await new Promise(async function(resolve, reject) { - const ws = (await fs.createWriteStream(ssh, config.target_tmp || config.target, { - flags: config.flags, - mode: config.mode - })); + const ws = await fs.createWriteStream( + ssh, + config.target_tmp || config.target, + { + flags: config.flags, + mode: config.mode, + } + ); config.stream(ws); - const error = false; // Quick fix ws sending both the error and close events on error ws.on('error', function(error) { if (error.code === 'ENOENT') { error = errors.NIKITA_FS_CWS_TARGET_ENOENT({ @@ -79,7 +73,7 @@ module.exports = { reject(error); }); ws.on('end', () => ws.destroy() ); - ws.on('close', () => error || resolve() ); + ws.on('close', () => resolve() ); }); // Replace the target file in append or sudo mode if (config.target_tmp) { @@ -100,11 +94,11 @@ module.exports = { hooks: { on_action: { after: [ - '@nikitajs/core/lib/plugins/execute' + '@nikitajs/core/plugins/execute' ], before: [ - '@nikitajs/core/lib/plugins/metadata/schema', - '@nikitajs/core/lib/plugins/metadata/tmpdir' + '@nikitajs/core/plugins/metadata/schema', + '@nikitajs/core/plugins/metadata/tmpdir' ], handler: async function({ config, diff --git a/packages/core/lib/actions/fs/base/createWriteStream/schema.json b/packages/core/lib/actions/fs/base/createWriteStream/schema.json index 94db98646..98ccaae07 100644 --- a/packages/core/lib/actions/fs/base/createWriteStream/schema.json +++ b/packages/core/lib/actions/fs/base/createWriteStream/schema.json @@ -12,7 +12,7 @@ "description": "Location where to write the temporary uploaded file before it is copied into its final destination, default to \"{tmpdir}/nikita_{YYMMDD}_{pid}_{rand}/{hash target}\"" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" }, "stream": { "typeof": "function", diff --git a/packages/core/lib/actions/fs/base/exists/index.js b/packages/core/lib/actions/fs/base/exists/index.js index 5f339e706..da02250d7 100644 --- a/packages/core/lib/actions/fs/base/exists/index.js +++ b/packages/core/lib/actions/fs/base/exists/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { try { await this.fs.base.stat({ diff --git a/packages/core/lib/actions/fs/base/lstat/index.js 
b/packages/core/lib/actions/fs/base/lstat/index.js index b321a27fa..fbd0d3799 100644 --- a/packages/core/lib/actions/fs/base/lstat/index.js +++ b/packages/core/lib/actions/fs/base/lstat/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return await this.fs.base.stat({ target: config.target, diff --git a/packages/core/lib/actions/fs/base/mkdir/index.js b/packages/core/lib/actions/fs/base/mkdir/index.js index 61ea4a066..c96ccb6d2 100644 --- a/packages/core/lib/actions/fs/base/mkdir/index.js +++ b/packages/core/lib/actions/fs/base/mkdir/index.js @@ -1,7 +1,7 @@ // Dependencies -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_MKDIR_TARGET_EEXIST: ({config}) => @@ -14,7 +14,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({config}) { if (typeof config.mode === 'number') { // Convert mode into a string diff --git a/packages/core/lib/actions/fs/base/mkdir/schema.json b/packages/core/lib/actions/fs/base/mkdir/schema.json index d96104fc3..9506381f6 100644 --- a/packages/core/lib/actions/fs/base/mkdir/schema.json +++ b/packages/core/lib/actions/fs/base/mkdir/schema.json @@ -3,17 +3,17 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/base/chown#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" }, "target": { "type": "string", "description": "Location of the directory to create." 
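The `mkdir` handler above converts a numeric `mode` into a string before building the shell command, the same idea behind the `config.mode.toString(8).substr(-4)` expression at the top of this section. A hypothetical helper illustrating that conversion (the name and padding behaviour below are not part of the patch):

```js
// Illustration only: render a numeric permission mode as the octal string
// understood by `chmod`, and pass string modes through untouched.
const modeToString = (mode) =>
  typeof mode === "number"
    ? mode.toString(8).padStart(4, "0").slice(-4)
    : mode;

console.log(modeToString(0o744));  // "0744"
console.log(modeToString(0o4755)); // "4755"
console.log(modeToString("755"));  // "755"
```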
}, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/base/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/core/lib/actions/fs/base/readFile/index.js b/packages/core/lib/actions/fs/base/readFile/index.js index 478cd966f..2c30d68a8 100644 --- a/packages/core/lib/actions/fs/base/readFile/index.js +++ b/packages/core/lib/actions/fs/base/readFile/index.js @@ -1,10 +1,10 @@ // Dependencies -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Normalize options const buffers = []; diff --git a/packages/core/lib/actions/fs/base/readFile/schema.json b/packages/core/lib/actions/fs/base/readFile/schema.json index 56aea12b4..74ed4d5a4 100644 --- a/packages/core/lib/actions/fs/base/readFile/schema.json +++ b/packages/core/lib/actions/fs/base/readFile/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "encoding": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/createReadStream#/definitions/config/properties/encoding" + "$ref": "module://@nikitajs/core/actions/fs/base/createReadStream#/definitions/config/properties/encoding" }, "target": { "oneOf": [ diff --git a/packages/core/lib/actions/fs/base/readdir/index.js b/packages/core/lib/actions/fs/base/readdir/index.js index 1abbd6182..427bfef92 100644 --- a/packages/core/lib/actions/fs/base/readdir/index.js +++ b/packages/core/lib/actions/fs/base/readdir/index.js @@ -1,9 +1,9 @@ // Dependencies -const {Dirent, constants} = require('fs'); -const dedent = require('dedent'); -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import {Dirent, constants} from 'fs'; +import dedent from 'dedent'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_READDIR_TARGET_ENOENT: ({config, error}) => @@ -16,7 +16,7 @@ const errors = { } // Action -module.exports = { +export default { handler: async function({config}) { // Note: -w work on macos, not on linux, it force raw printing of // non-printable characters. 
This is the default when output is not to a diff --git a/packages/core/lib/actions/fs/base/readlink/index.js b/packages/core/lib/actions/fs/base/readlink/index.js index 77f7b9865..4cbb3bd24 100644 --- a/packages/core/lib/actions/fs/base/readlink/index.js +++ b/packages/core/lib/actions/fs/base/readlink/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {stdout} = await this.execute({ command: `readlink ${config.target}` diff --git a/packages/core/lib/actions/fs/base/rename/index.js b/packages/core/lib/actions/fs/base/rename/index.js index 3e08634ab..c9d6cabaf 100644 --- a/packages/core/lib/actions/fs/base/rename/index.js +++ b/packages/core/lib/actions/fs/base/rename/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return await this.execute({ command: `mv ${config.source} ${config.target}`, diff --git a/packages/core/lib/actions/fs/base/rmdir/index.js b/packages/core/lib/actions/fs/base/rmdir/index.js index 2bcb3db84..aac58d91f 100644 --- a/packages/core/lib/actions/fs/base/rmdir/index.js +++ b/packages/core/lib/actions/fs/base/rmdir/index.js @@ -1,9 +1,8 @@ // Dependencies -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); - -const {escapeshellarg} = utils.string; +import utils from '@nikitajs/core/utils'; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_RMDIR_TARGET_ENOENT: ({config, error}) => @@ -16,14 +15,14 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} }) { try { await this.execute({ - command: [`[ ! -d ${escapeshellarg(config.target)} ] && exit 2`, !config.recursive ? `rmdir ${escapeshellarg(config.target)}` : `rm -R ${escapeshellarg(config.target)}`].join('\n') + command: [`[ ! -d ${esa(config.target)} ] && exit 2`, !config.recursive ? `rmdir ${esa(config.target)}` : `rm -R ${esa(config.target)}`].join('\n') }); log({ message: "Directory successfully removed", diff --git a/packages/core/lib/actions/fs/base/stat/README.md b/packages/core/lib/actions/fs/base/stat/README.md index 3e89303b7..21469600a 100644 --- a/packages/core/lib/actions/fs/base/stat/README.md +++ b/packages/core/lib/actions/fs/base/stat/README.md @@ -6,7 +6,7 @@ Retrieve file information. ## File information The `mode` parameter indicates the file type. For conveniency, the -`@nikitajs/core/lib/utils/stats` module provide functions to check each +`@nikitajs/core/utils/stats` module provide functions to check each possible file types. ## Example @@ -14,20 +14,20 @@ possible file types. 
Check if target is a file: ```js -const utils = require('@nikitajs/core/lib/utils'); +import utils from '@nikitajs/core/utils'; const {stats} = await nikita -.file.touch("/tmp/a_file") -.fs.base.stat("/tmp/a_file"); + .file.touch("/tmp/a_file") + .fs.base.stat("/tmp/a_file"); assert(utils.stats.isFile(stats.mode) === true); ``` Check if target is a directory: ```js -const utils = require('@nikitajs/core/lib/utils'); +import utils from '@nikitajs/core/utils'; const {stats} = await nikita -.fs.base.mkdir("/tmp/a_file") -.fs.base.stat("/tmp/a_file"); + .fs.base.mkdir("/tmp/a_file") + .fs.base.stat("/tmp/a_file"); assert(utils.stats.isDirectory(stats.mode) === true); ``` diff --git a/packages/core/lib/actions/fs/base/stat/index.js b/packages/core/lib/actions/fs/base/stat/index.js index 1147b708e..be7209345 100644 --- a/packages/core/lib/actions/fs/base/stat/index.js +++ b/packages/core/lib/actions/fs/base/stat/index.js @@ -1,11 +1,11 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('../../../../utils'); -const definitionsIn = require('./schema.in.json'); -const definitionsOut = require('./schema.out.json'); +import dedent from 'dedent'; +import utils from '@nikitajs/core/utils'; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitionsIn from './schema.in.json' assert { type: "json" }; +import definitionsOut from './schema.out.json' assert { type: "json" }; -const {escapeshellarg} = utils.string; const errors = { NIKITA_FS_STAT_TARGET_ENOENT: ({config, error}) => utils.error('NIKITA_FS_STAT_TARGET_ENOENT', ['failed to stat the target, no file exists for target,', `got ${JSON.stringify(config.target)}`], { @@ -17,45 +17,41 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({config}) { // Normalize configuration - if (config.dereference == null) { - config.dereference = true; - } - const dereference = config.dereference ? '-L' : ''; - try { - const {stdout} = await this.execute({ - command: dedent` - [ ! -e ${config.target} ] && exit 3 - if [ -d /private ]; then - stat ${dereference} -f '%Xp|%u|%g|%z|%a|%m' ${escapeshellarg(config.target)} # MacOS - else - stat ${dereference} -c '%f|%u|%g|%s|%X|%Y' ${escapeshellarg(config.target)} # Linux - fi - `, - trim: true - }); - const [rawmodehex, uid, gid, size, atime, mtime] = stdout.split('|'); - return { - stats: { - mode: parseInt(rawmodehex, 16), // dont know why `rawmodehex` was prefixed by `"0xa1ed"` - uid: parseInt(uid, 10), - gid: parseInt(gid, 10), - size: parseInt(size, 10), - atime: parseInt(atime, 10), - mtime: parseInt(mtime, 10) - } - }; - } catch (error) { + // config.dereference ??= true; + const dereference = config.dereference ? "-L" : ""; + const {stdout} = await this.execute({ + command: dedent` + [ ! 
-e ${config.target} ] && exit 3 + if [ -d /private ]; then + stat ${dereference} -f '%Xp|%u|%g|%z|%a|%m' ${esa(config.target)} # MacOS + else + stat ${dereference} -c '%f|%u|%g|%s|%X|%Y' ${esa(config.target)} # Linux + fi + `, + trim: true + }).catch( error => { if (error.exit_code === 3) { - error = errors.NIKITA_FS_STAT_TARGET_ENOENT({ + throw errors.NIKITA_FS_STAT_TARGET_ENOENT({ config: config, error: error }); } throw error; - } + }); + const [rawmodehex, uid, gid, size, atime, mtime] = stdout.split("|"); + return { + stats: { + mode: parseInt(rawmodehex, 16), // dont know why `rawmodehex` was prefixed by `"0xa1ed"` + uid: parseInt(uid, 10), + gid: parseInt(gid, 10), + size: parseInt(size, 10), + atime: parseInt(atime, 10), + mtime: parseInt(mtime, 10), + }, + }; }, metadata: { argument_to_config: 'target', diff --git a/packages/core/lib/actions/fs/base/stat/schema.in.json b/packages/core/lib/actions/fs/base/stat/schema.in.json index 9a0970edd..9fd9ecdc8 100644 --- a/packages/core/lib/actions/fs/base/stat/schema.in.json +++ b/packages/core/lib/actions/fs/base/stat/schema.in.json @@ -4,6 +4,7 @@ "properties": { "dereference": { "type": "boolean", + "default": true, "description": "Follow links, similar to `lstat`, default is \"true\", just like in the native Node.js `fs.stat` function, use `nikita.fs.lstat` to retrive link information." }, "target": { @@ -29,7 +30,7 @@ "type": "object", "properties": { "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" }, "uid": { "type": "integer", diff --git a/packages/core/lib/actions/fs/base/stat/schema.out.json b/packages/core/lib/actions/fs/base/stat/schema.out.json index c4ce0119f..688dfbbe5 100644 --- a/packages/core/lib/actions/fs/base/stat/schema.out.json +++ b/packages/core/lib/actions/fs/base/stat/schema.out.json @@ -5,7 +5,7 @@ "type": "object", "properties": { "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" }, "uid": { "type": "integer", diff --git a/packages/core/lib/actions/fs/base/symlink/index.js b/packages/core/lib/actions/fs/base/symlink/index.js index 64af9c5cf..ef9f351b0 100644 --- a/packages/core/lib/actions/fs/base/symlink/index.js +++ b/packages/core/lib/actions/fs/base/symlink/index.js @@ -1,15 +1,13 @@ // Dependencies -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); - -const {escapeshellarg} = utils.string; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.execute({ - command: `ln -sf ${escapeshellarg(config.source)} ${escapeshellarg(config.target)}` + command: `ln -sf ${esa(config.source)} ${esa(config.target)}` }); }, metadata: { diff --git a/packages/core/lib/actions/fs/base/unlink/index.js b/packages/core/lib/actions/fs/base/unlink/index.js index 0f12a414c..ec9832c2c 100644 --- a/packages/core/lib/actions/fs/base/unlink/index.js +++ b/packages/core/lib/actions/fs/base/unlink/index.js @@ -1,8 +1,8 @@ // ## Dependencies -const dedent = require('dedent'); -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import dedent from 'dedent'; +import utils from 
'@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_UNLINK_ENOENT: function({config}) { @@ -20,7 +20,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({config}) { try { // `! -e`: file does not exist diff --git a/packages/core/lib/actions/fs/base/writeFile/index.js b/packages/core/lib/actions/fs/base/writeFile/index.js index b929539fd..6b9b17d2f 100644 --- a/packages/core/lib/actions/fs/base/writeFile/index.js +++ b/packages/core/lib/actions/fs/base/writeFile/index.js @@ -1,7 +1,7 @@ // Dependencies -const utils = require('../../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_FS_STAT_TARGET_ENOENT: ({config, err}) => @@ -19,7 +19,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({ config, tools: {path}, diff --git a/packages/core/lib/actions/fs/base/writeFile/schema.json b/packages/core/lib/actions/fs/base/writeFile/schema.json index f482c5bf5..c564b1a02 100644 --- a/packages/core/lib/actions/fs/base/writeFile/schema.json +++ b/packages/core/lib/actions/fs/base/writeFile/schema.json @@ -27,7 +27,7 @@ "description": "Location where to write the temporary uploaded file before it is copied into its final destination, default to \"{tmpdir}/nikita_{YYMMDD}_{pid}_{rand}/{hash target}\"" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/createWriteStream#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/createWriteStream#/definitions/config/properties/mode" }, "target": { "oneOf": [ diff --git a/packages/core/lib/actions/fs/chmod/index.js b/packages/core/lib/actions/fs/chmod/index.js index a394d25cd..25c182f2d 100644 --- a/packages/core/lib/actions/fs/chmod/index.js +++ b/packages/core/lib/actions/fs/chmod/index.js @@ -1,9 +1,9 @@ // Dependencies -utils = require('../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { let stats; if (config.stats) { diff --git a/packages/core/lib/actions/fs/chmod/schema.json b/packages/core/lib/actions/fs/chmod/schema.json index 5b2cac539..056f8ff43 100644 --- a/packages/core/lib/actions/fs/chmod/schema.json +++ b/packages/core/lib/actions/fs/chmod/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" }, "stats": { "typeof": "object", diff --git a/packages/core/lib/actions/fs/chown/index.js b/packages/core/lib/actions/fs/chown/index.js index 9a4a27e02..de77ec398 100644 --- a/packages/core/lib/actions/fs/chown/index.js +++ b/packages/core/lib/actions/fs/chown/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (!(config.uid != null || config.gid != null)) { throw Error("Missing one of uid or gid option"); @@ -19,8 +19,7 @@ module.exports = { gid = typeof config.gid === "number" ? 
config.gid - : (({ stdout } = await this.execute(`id -g '${config.gid}'`)), - parseInt(stdout.trim())); + : parseInt((await this.execute(`id -g '${config.gid}'`)).stdout.trim()); } // Retrieve target stats let stats; diff --git a/packages/core/lib/actions/fs/chown/schema.json b/packages/core/lib/actions/fs/chown/schema.json index e2d51480d..40306a4d8 100644 --- a/packages/core/lib/actions/fs/chown/schema.json +++ b/packages/core/lib/actions/fs/chown/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/base/chown#/definitions/config/properties/gid" }, "stats": { "typeof": "object", @@ -14,7 +14,7 @@ "description": "Location of the file which permissions will change." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/base/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/core/lib/actions/fs/copy/index.js b/packages/core/lib/actions/fs/copy/index.js index d1ba124d3..0457519d8 100644 --- a/packages/core/lib/actions/fs/copy/index.js +++ b/packages/core/lib/actions/fs/copy/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); -const utils = require('../../../utils'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { $status, log, path } }) { // Retrieve stats information about the source unless provided through the "source_stats" option. const source_stats = await ( async () => { diff --git a/packages/core/lib/actions/fs/copy/schema.json b/packages/core/lib/actions/fs/copy/schema.json index 7c895d364..7d88d39f9 100644 --- a/packages/core/lib/actions/fs/copy/schema.json +++ b/packages/core/lib/actions/fs/copy/schema.json @@ -3,10 +3,10 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode" }, "parent": { "oneOf": [ @@ -17,13 +17,13 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/mode" }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/uid" } } } @@ -42,7 +42,7 @@ "source_stats": { "type": "object", "description": "Short-circuit to prevent source stat retrieval if already at our\ndisposal.", - "$ref": "module://@nikitajs/core/lib/actions/fs/base/stat#/definitions/output/properties/stats" + "$ref": "module://@nikitajs/core/actions/fs/base/stat#/definitions/output/properties/stats" }, "target": { "type": "string", @@ -51,10 +51,10 @@ "target_stats": { "type": "object", 
"description": "Short-circuit to prevent target stat retrieval if already at our\ndisposal.", - "$ref": "module://@nikitajs/core/lib/actions/fs/base/stat#/definitions/output/properties/stats" + "$ref": "module://@nikitajs/core/actions/fs/base/stat#/definitions/output/properties/stats" }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/core/lib/actions/fs/glob/index.js b/packages/core/lib/actions/fs/glob/index.js index ec53df34d..a13dd7d57 100644 --- a/packages/core/lib/actions/fs/glob/index.js +++ b/packages/core/lib/actions/fs/glob/index.js @@ -1,7 +1,9 @@ // Dependencies -const { Minimatch } = require("minimatch"); -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import mm from "minimatch"; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; + +const { Minimatch } = mm; // Utility const getprefix = function(pattern) { @@ -32,7 +34,7 @@ const getprefix = function(pattern) { }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { path } }) { if (config.minimatch == null) { config.minimatch = {}; diff --git a/packages/core/lib/actions/fs/hash/index.js b/packages/core/lib/actions/fs/hash/index.js index 5f2a3d705..104da353a 100644 --- a/packages/core/lib/actions/fs/hash/index.js +++ b/packages/core/lib/actions/fs/hash/index.js @@ -1,88 +1,120 @@ // Dependencies -const crypto = require('crypto'); -const dedent = require('dedent'); -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import crypto from "crypto"; +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; const errors = { - NIKITA_FS_HASH_FILETYPE_UNSUPPORTED: function({config, stats}) { - return utils.error('NIKITA_FS_HASH_FILETYPE_UNSUPPORTED', ['only "File" and "Directory" types are supported,', `got ${JSON.stringify(utils.stats.type(stats.mode))},`, `location is ${JSON.stringify(config.target)}`], { - target: config.target - }); + NIKITA_FS_HASH_FILETYPE_UNSUPPORTED: function ({ config, stats }) { + return utils.error( + "NIKITA_FS_HASH_FILETYPE_UNSUPPORTED", + [ + 'only "File" and "Directory" types are supported,', + `got ${JSON.stringify(utils.stats.type(stats.mode))},`, + `location is ${JSON.stringify(config.target)}`, + ], + { + target: config.target, + } + ); + }, + NIKITA_FS_HASH_MISSING_OPENSSL: function () { + return utils.error("NIKITA_FS_HASH_MISSING_OPENSSL", [ + "the `openssl` command must be present on your system,", + "please install it before pursuing", + ]); }, - NIKITA_FS_HASH_MISSING_OPENSSL: function() { - return utils.error('NIKITA_FS_HASH_MISSING_OPENSSL', ['the `openssl` command must be present on your system,', "please install it before pursuing"]); + NIKITA_FS_HASH_HASH_NOT_EQUAL: function ({ config, hash }) { + return utils.error( + "NIKITA_FS_HASH_HASH_NOT_EQUAL", + [ + "the target hash does not equal the execpted value,", + `got ${JSON.stringify(hash)},`, + `expected ${JSON.stringify(config.hash)}`, + ], + { + target: config.target, + } + ); }, - NIKITA_FS_HASH_HASH_NOT_EQUAL: function({config, hash}) { - return utils.error('NIKITA_FS_HASH_HASH_NOT_EQUAL', ['the target hash does not equal the execpted value,', `got ${JSON.stringify(hash)},`, `expected 
${JSON.stringify(config.hash)}`], { - target: config.target - }); - } }; // Action -module.exports = { - handler: async function({config}) { - const {stats} = config.stats ? config.stats : (await this.fs.base.stat(config.target)); - if (!(utils.stats.isFile(stats.mode) || utils.stats.isDirectory(stats.mode))) { +export default { + handler: async function ({ config }) { + const { stats } = config.stats + ? config.stats + : await this.fs.base.stat(config.target); + if ( + !utils.stats.isFile(stats.mode) && + !utils.stats.isDirectory(stats.mode) + ) { throw errors.NIKITA_FS_HASH_FILETYPE_UNSUPPORTED({ config: config, - stats: stats + stats: stats, }); } - let hash = null; - try { + const hash = await (async () => { // Target is a directory if (utils.stats.isDirectory(stats.mode)) { - const {files} = (await this.fs.glob(`${config.target}/**`, { - dot: true - })); - const {stdout} = (await this.execute({ + const { files } = await this.fs.glob(`${config.target}/**`, { + dot: true, + }); + const { stdout } = await this.execute({ command: [ - 'command -v openssl >/dev/null || exit 2', - ...files.map(function(file) { - return `[ -f ${file} ] && openssl dgst -${config.algo} ${file} | sed 's/^.* \\([a-z0-9]*\\)$/\\1/g'`; - }), - 'exit 0' - ].join('\n'), - trim: true - })); - const hashs = utils.string.lines(stdout).filter(function(line) { - return /\w+/.test(line); - }).sort(); - hash = hashs.length === 0 ? crypto.createHash(config.algo).update('').digest('hex') : hashs.length === 1 ? hashs[0] : crypto.createHash(config.algo).update(hashs.join('')).digest('hex'); - // Target is a file + "command -v openssl >/dev/null || exit 2", + ...files.map( + (file) => + `[ -f ${file} ] && openssl dgst -${config.algo} ${file} | sed 's/^.* \\([a-z0-9]*\\)$/\\1/g'` + ), + "exit 0", + ].join("\n"), + trim: true, + }).catch((error) => { + if (error.exit_code === 2) { + throw errors.NIKITA_FS_HASH_MISSING_OPENSSL(); + } + throw error; + }); + const hashs = utils.string + .lines(stdout) + .filter((line) => /\w+/.test(line)) + .sort(); + return hashs.length === 0 + ? crypto.createHash(config.algo).update("").digest("hex") + : hashs.length === 1 + ? 
hashs[0] + : crypto.createHash(config.algo).update(hashs.join("")).digest("hex"); + // Target is a file } else if (utils.stats.isFile(stats.mode)) { - const {stdout} = (await this.execute({ + const { stdout: hash } = await this.execute({ command: dedent` command -v openssl >/dev/null || exit 2 openssl dgst -${config.algo} ${config.target} | sed 's/^.* \([a-z0-9]*\)$/\1/g' `, - trim: true - })); - hash = stdout; - } - } catch (error) { - if (error.exit_code === 2) { - throw errors.NIKITA_FS_HASH_MISSING_OPENSSL(); + trim: true, + }).catch((error) => { + if (error.exit_code === 2) { + throw errors.NIKITA_FS_HASH_MISSING_OPENSSL(); + } + throw error; + }); + return hash; } - if (error) { - throw error; - } - } + })(); if (config.hash && config.hash !== hash) { throw errors.NIKITA_FS_HASH_HASH_NOT_EQUAL({ config: config, - hash: hash + hash: hash, }); } return { - hash: hash + hash: hash, }; }, metadata: { - argument_to_config: 'target', + argument_to_config: "target", shy: true, - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/core/lib/actions/fs/link/index.js b/packages/core/lib/actions/fs/link/index.js index 16d4b757c..ec7002f23 100644 --- a/packages/core/lib/actions/fs/link/index.js +++ b/packages/core/lib/actions/fs/link/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import dedent from 'dedent'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { path } }) { // Set default if (config.mode == null) { @@ -51,7 +51,7 @@ module.exports = { mode: config.mode, }); } else { - exists = await this.call( + const exists = await this.call( { $raw_output: true, }, diff --git a/packages/core/lib/actions/fs/link/schema.json b/packages/core/lib/actions/fs/link/schema.json index 59c9a44e8..853f2f9e1 100644 --- a/packages/core/lib/actions/fs/link/schema.json +++ b/packages/core/lib/actions/fs/link/schema.json @@ -15,7 +15,7 @@ "description": "Create an executable file with an `exec` command." }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/base/chmod#/definitions/config/properties/mode" } }, "required": [ diff --git a/packages/core/lib/actions/fs/mkdir/index.js b/packages/core/lib/actions/fs/mkdir/index.js index ecad3c97c..d56036e9a 100644 --- a/packages/core/lib/actions/fs/mkdir/index.js +++ b/packages/core/lib/actions/fs/mkdir/index.js @@ -1,6 +1,6 @@ // Dependencies -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Errors const errors = { @@ -17,7 +17,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log, path }, ssh }) { if (!ssh && (config.cwd === true || !config.cwd)) { // Configuration validation diff --git a/packages/core/lib/actions/fs/mkdir/schema.json b/packages/core/lib/actions/fs/mkdir/schema.json index 40e5e1f9c..9877af69b 100644 --- a/packages/core/lib/actions/fs/mkdir/schema.json +++ b/packages/core/lib/actions/fs/mkdir/schema.json @@ -20,10 +20,10 @@ "description": "Overwrite permissions on the target directory. By default,\npermissions on only set on directory creation. It does not impact\nthe parent directory permissions." 
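The `fs/hash` refactor above preserves the original hashing rules: a file hashes to its own digest, an empty directory hashes to the digest of the empty string, and a directory with several files hashes to the digest of their sorted, concatenated digests. A local-filesystem sketch of the same rules using Node's `crypto`, whereas the real action shells out to `openssl dgst` on the remote host:

```js
// Illustrative local equivalent of the hashing strategy used by `fs/hash`.
import crypto from "node:crypto";
import fs from "node:fs/promises";

const digest = (algo, input) =>
  crypto.createHash(algo).update(input).digest("hex");

async function hashTargets(algo, files) {
  const hashes = await Promise.all(
    files.map(async (file) => digest(algo, await fs.readFile(file)))
  );
  hashes.sort();
  if (hashes.length === 0) return digest(algo, ""); // empty directory
  if (hashes.length === 1) return hashes[0];        // single file
  return digest(algo, hashes.join(""));             // aggregate digest
}

console.log(await hashTargets("md5", [])); // digest of the empty string
```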
}, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode" }, "parent": { "oneOf": [ @@ -34,13 +34,13 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/mode" }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/uid" } } } @@ -52,7 +52,7 @@ "description": "Location of the directory to create." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/core/lib/actions/fs/move/index.js b/packages/core/lib/actions/fs/move/index.js index df09ade55..cb66d69d7 100644 --- a/packages/core/lib/actions/fs/move/index.js +++ b/packages/core/lib/actions/fs/move/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Exports -module.exports = { +export default { handler: async function ({ config, tools: { log, path } }) { const { exists } = await this.fs.base.exists(config.target); if (!exists) { diff --git a/packages/core/lib/actions/fs/remove/index.js b/packages/core/lib/actions/fs/remove/index.js index 538d9446e..f2ea50e6e 100644 --- a/packages/core/lib/actions/fs/remove/index.js +++ b/packages/core/lib/actions/fs/remove/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Exports -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // Start real work const { files } = await this.fs.glob(config.target); diff --git a/packages/core/lib/actions/fs/wait/index.js b/packages/core/lib/actions/fs/wait/index.js index eb34a9f5b..515e1e881 100644 --- a/packages/core/lib/actions/fs/wait/index.js +++ b/packages/core/lib/actions/fs/wait/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Exports -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { let status = false; // Validate parameters @@ -29,7 +29,7 @@ module.exports = { }, hooks: { on_action: { - after: "@nikitajs/core/lib/plugins/metadata/argument_to_config", + after: "@nikitajs/core/plugins/metadata/argument_to_config", handler: function ({ config }) { if (typeof config.target === "string") { return (config.target = [config.target]); diff --git a/packages/core/lib/actions/ssh/close/index.js b/packages/core/lib/actions/ssh/close/index.js index 0c0e91c41..ba8ef8569 100644 --- 
a/packages/core/lib/actions/ssh/close/index.js +++ b/packages/core/lib/actions/ssh/close/index.js @@ -1,10 +1,10 @@ // Dependencies -const connect = require('ssh2-connect'); -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import connect from 'ssh2-connect'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: function({config, siblings}) { if (config.ssh == null) { config.ssh = siblings.map( ({output}) => diff --git a/packages/core/lib/actions/ssh/open/README.md b/packages/core/lib/actions/ssh/open/README.md index 4b0089c43..aa70d73de 100644 --- a/packages/core/lib/actions/ssh/open/README.md +++ b/packages/core/lib/actions/ssh/open/README.md @@ -57,7 +57,7 @@ provided for conveniency and is often used to pass `ssh` information when initializing the session. ```js -require('nikita')({ +nikita({ ssh: { host: 'localhost', user: 'my_account', diff --git a/packages/core/lib/actions/ssh/open/index.js b/packages/core/lib/actions/ssh/open/index.js index 0d5d93481..c5c310813 100644 --- a/packages/core/lib/actions/ssh/open/index.js +++ b/packages/core/lib/actions/ssh/open/index.js @@ -1,29 +1,31 @@ // Dependencies -const connect = require('ssh2-connect'); -const fs = require('fs').promises; -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import connect from "ssh2-connect"; +import fs from "node:fs/promises"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({ config, tools: {log} }) { +export default { + handler: async function ({ config, tools: { log } }) { if (!(config.private_key || config.password || config.private_key_path)) { // Validate authentication - throw utils.error('NIKITA_SSH_OPEN_NO_AUTH_METHOD_FOUND', ['unable to authenticate the SSH connection,', 'one of the "private_key", "password", "private_key_path"', 'configuration properties must be provided']); + throw utils.error("NIKITA_SSH_OPEN_NO_AUTH_METHOD_FOUND", [ + "unable to authenticate the SSH connection,", + 'one of the "private_key", "password", "private_key_path"', + "configuration properties must be provided", + ]); } // Read private key if option is a path if (!config.private_key && !config.password) { log({ message: `Read Private Key from: ${config.private_key_path}`, - level: 'DEBUG' + level: "DEBUG", }); const location = await utils.tilde.normalize(config.private_key_path); try { - ({ - data: config.private_key - } = (await fs.readFile(location, 'ascii'))); + ({ data: config.private_key } = await fs.readFile(location, "ascii")); } catch (error) { - if (error.code === 'ENOENT') { + if (error.code === "ENOENT") { throw Error(`Private key doesnt exists: ${JSON.stringify(location)}`); } throw error; @@ -33,20 +35,20 @@ module.exports = { // Establish connection log({ message: `Read Private Key: ${JSON.stringify(config.private_key_path)}`, - level: 'DEBUG' + level: "DEBUG", }); const conn = await connect(config); log({ message: "Connection is established", - level: 'INFO' + level: "INFO", }); return { - ssh: conn + ssh: conn, }; } catch (error) { log({ message: "Connection failed", - level: 'WARN' + level: "WARN", }); // Continue to bootstrap root access } @@ -54,24 +56,27 @@ module.exports = { if (config.root.username) { log({ message: "Bootstrap Root Access", - level: 'INFO' + level: "INFO", }); await 
this.ssh.root(config.root); } log({ message: "Establish Connection: attempt after enabling root access", - level: 'DEBUG' - }); - return await this.call({ - $retry: 3 - }, async function() { - return { - ssh: await connect(config) - }; + level: "DEBUG", }); + return await this.call( + { + $retry: 3, + }, + async function () { + return { + ssh: await connect(config), + }; + } + ); }, hooks: { - on_action: function({config}) { + on_action: function ({ config }) { if (config.private_key == null) { config.private_key = config.privateKey; } @@ -92,9 +97,9 @@ module.exports = { if (config.root.port == null) { config.root.port = config.port; } - } + }, }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/core/lib/actions/ssh/open/schema.json b/packages/core/lib/actions/ssh/open/schema.json index f8fc3c527..f82032022 100644 --- a/packages/core/lib/actions/ssh/open/schema.json +++ b/packages/core/lib/actions/ssh/open/schema.json @@ -38,7 +38,7 @@ "description": "Local file location of the private key used to authenticate the user\nand create the SSH connection. It is only used if `password` and\n`private_key` are not provided." }, "root": { - "$ref": "module://@nikitajs/core/lib/actions/ssh/root", + "$ref": "module://@nikitajs/core/actions/ssh/root", "description": "Configuration passed to `nikita.ssh.root` to enable password-less root\nlogin." }, "username": { diff --git a/packages/core/lib/actions/ssh/root/index.js b/packages/core/lib/actions/ssh/root/index.js index d69b4a56f..a7ff48c3d 100644 --- a/packages/core/lib/actions/ssh/root/index.js +++ b/packages/core/lib/actions/ssh/root/index.js @@ -1,15 +1,14 @@ // Dependencies -const fs = require('fs/promises'); -const dedent = require('dedent'); -const connect = require('ssh2-connect'); -const exec = require('ssh2-exec'); -const utils = require('../../../utils'); -const definitions = require('./schema.json'); +import fs from "node:fs/promises"; +import dedent from "dedent"; +import connect from "ssh2-connect"; +import exec from "ssh2-exec"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({metadata, config, tools: {log}}) { - var err, ref; +export default { + handler: async function ({ metadata, config, tools: { log } }) { if (config.host == null) { config.host = config.ip; } @@ -24,9 +23,14 @@ module.exports = { config.selinux = false; } if (config.selinux === true) { - config.selinux = 'permissive'; + config.selinux = "permissive"; } - if (config.selinux && ((ref = config.selinux) !== 'enforcing' && ref !== 'permissive' && ref !== 'disabled')) { + if ( + config.selinux && + config.selinux !== "enforcing" && + config.selinux !== "permissive" && + config.selinux !== "disabled" + ) { // Validation throw Error(`Invalid option \"selinux\": ${config.selinux}`); } @@ -35,48 +39,38 @@ module.exports = { if (config.public_key_path && !config.public_key) { const location = await utils.tilde.normalize(config.public_key_path); try { - ({ - data: config.public_key - } = (await fs.readFile(location, 'ascii'))); + ({ data: config.public_key } = await fs.readFile(location, "ascii")); } catch (error) { - err = error; - if (err.code === 'ENOENT') { + if (error.code === "ENOENT") { throw Error(`Private key doesnt exists: ${JSON.stringify(location)}`); } - throw err; + throw error; } } // Read private key if option is a path if (config.private_key_path && !config.private_key) { log({ message: `Read 
Private Key: ${JSON.stringify(config.private_key_path)}`, - level: 'DEBUG' + level: "DEBUG", }); const location = await utils.tilde.normalize(config.private_key_path); try { - ({ - data: config.private_key - } = (await fs.readFile(location, 'ascii'))); + ({ data: config.private_key } = await fs.readFile(location, "ascii")); } catch (error) { - err = error; - if (err.code === 'ENOENT') { + if (error.code === "ENOENT") { throw Error(`Private key doesnt exists: ${JSON.stringify(location)}`); } - throw err; + throw error; } } - await this.call(async function() { - log({ - message: "Connecting", - level: 'DEBUG' - }); - const conn = !metadata.dry ? (await connect(config)) : null; - log({ - message: "Connected", - level: 'INFO' - }); + await this.call(async function () { + log("DEBUG", "Opening connection"); + const conn = !metadata.dry ? await connect(config) : null; + log("INFO", "Connection establish"); let command = []; - command.push(`sed -i.back 's/.*PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config;`); + command.push( + `sed -i.back 's/.*PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config;` + ); if (config.public_key) { command.push(dedent` mkdir -p /root/.ssh; chmod 700 /root/.ssh; @@ -85,7 +79,7 @@ module.exports = { } command.push(dedent` sed -i.back 's/.*PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config; - selinux="${config.selinux || ''}"; + selinux="${config.selinux || ""}"; if [ -n "$selinux" ] && [ -f /etc/selinux/config ] && grep ^SELINUX="$selinux" /etc/selinux/config; then sed -i.back "s/^SELINUX=enforcing/SELINUX=$selinux/" /etc/selinux/config; @@ -93,15 +87,15 @@ module.exports = { exit 2; fi; `); - command = command.join('\n'); - if (config.username !== 'root') { - command = command.replace(/\n/g, ' '); - if (typeof config.command === 'function') { + command = command.join("\n"); + if (config.username !== "root") { + command = command.replace(/\n/g, " "); + if (typeof config.command === "function") { command = config.command(command); - } else if (typeof config.command === 'string') { + } else if (typeof config.command === "string") { command = `${config.command} ${command}`; } else { - config.command = 'sudo '; + config.command = "sudo "; if (config.user) { config.command += `-u ${config.user} `; } @@ -114,50 +108,53 @@ module.exports = { } log({ message: "Enable Root Access", - level: 'DEBUG' + level: "DEBUG", }); log({ message: command, - type: 'stdin' + type: "stdin", }); if (!metadata.dry) { - const child = exec({ - ssh: conn, - command: command - }, (error) => { - if (error?.code === 2) { - log({ - message: "Root Access Enabled", - level: "WARN", - }); - rebooting = true; - } else { - throw error; + const child = exec( + { + ssh: conn, + command: command, + }, + (error) => { + if (error?.code === 2) { + log("WARN", "Root Access Enabled"); + rebooting = true; + } else { + throw error; + } } - }); + ); child.stdout.on("data", (data) => log({ message: data, type: "stdout" }) ); - child.stdout.on("end", (data) => + child.stdout.on("end", () => log({ message: null, type: "stdout" }) ); child.stderr.on("data", (data) => log({ message: data, type: "stderr" }) ); - child.stderr.on("end", (data) => + child.stderr.on("end", () => log({ message: null, type: "stderr" }) ); } }); - await this.call({ - $if: rebooting, - $retry: true, - $sleep: 3000 - }, async function() { - (await connect(config)).end(); - }); + await this.call( + { + $if: rebooting, + $retry: true, + $sleep: 3000, + }, + async function () { + (await connect(config)).end(); + } + ); }, 
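The final `this.call` above reconnects once root access is enabled, relying on `$if: rebooting`, `$retry: true` and `$sleep: 3000` so the attempt is repeated while the SSH daemon restarts. A reduced sketch of that retry behaviour, not Nikita's implementation, assuming `connect` from `ssh2-connect`:

```js
// Sketch only: retry an SSH connection with a pause between attempts,
// mirroring what `$retry` and `$sleep` express declaratively above.
import connect from "ssh2-connect";

const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function retryConnect(config, { attempts = 10, sleep = 3000 } = {}) {
  let lastError;
  for (let i = 0; i < attempts; i++) {
    try {
      return await connect(config);
    } catch (error) {
      lastError = error;
      await wait(sleep);
    }
  }
  throw lastError;
}
```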
metadata: { - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/core/lib/actions/wait/index.js b/packages/core/lib/actions/wait/index.js index 8740bc64e..c306b3f44 100644 --- a/packages/core/lib/actions/wait/index.js +++ b/packages/core/lib/actions/wait/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: function ({ config }) { return new Promise(function (resolve) { return setTimeout(resolve, config.time); diff --git a/packages/core/lib/index.js b/packages/core/lib/index.js index 98d578112..88b8a8eeb 100644 --- a/packages/core/lib/index.js +++ b/packages/core/lib/index.js @@ -5,50 +5,86 @@ This is the main Nikita entry point. It expose a function to initialize a new Nikita session. */ -require("./register"); -const session = require("./session"); +import "@nikitajs/core/register"; +import { with_options as session } from "@nikitajs/core/session"; -const create = (...args) => - session.with_options(args, { +import args from "@nikitajs/core/plugins/args"; +import metadataArgumentToConfig from "@nikitajs/core/plugins/metadata/argument_to_config"; +import assertions from "@nikitajs/core/plugins/assertions"; +import assertionsExists from "@nikitajs/core/plugins/assertions/exists"; +import conditions from "@nikitajs/core/plugins/conditions"; +import conditionsExecute from "@nikitajs/core/plugins/conditions/execute"; +import conditionsExists from "@nikitajs/core/plugins/conditions/exists"; +import conditionsOs from "@nikitajs/core/plugins/conditions/os"; +import global from "@nikitajs/core/plugins/global"; +import history from "@nikitajs/core/plugins/history"; +import magicDollar from "@nikitajs/core/plugins/magic_dollar"; +import metadataDebug from "@nikitajs/core/plugins/metadata/debug"; +import disabled from "@nikitajs/core/plugins/metadata/disabled"; +import metadataExecute from "@nikitajs/core/plugins/metadata/execute"; +import metadataHeader from "@nikitajs/core/plugins/metadata/header"; +import metadataPosition from "@nikitajs/core/plugins/metadata/position"; +import metadataRaw from "@nikitajs/core/plugins/metadata/raw"; +import metadataRelax from "@nikitajs/core/plugins/metadata/relax"; +import metadataRetry from "@nikitajs/core/plugins/metadata/retry"; +import metadataSchema from "@nikitajs/core/plugins/metadata/schema"; +import metadataTime from "@nikitajs/core/plugins/metadata/time"; +import metadataTmpdir from "@nikitajs/core/plugins/metadata/tmpdir"; +import metadataUuid from "@nikitajs/core/plugins/metadata/uuid"; +import outputLogs from "@nikitajs/core/plugins/output/logs"; +import outputStatus from "@nikitajs/core/plugins/output/status"; +import pubsub from "@nikitajs/core/plugins/pubsub/index"; +import ssh from "@nikitajs/core/plugins/ssh"; +import templated from "@nikitajs/core/plugins/templated"; +import toolsDig from "@nikitajs/core/plugins/tools/dig"; +import toolsEvents from "@nikitajs/core/plugins/tools/events"; +import toolsFind from "@nikitajs/core/plugins/tools/find"; +import toolsLog from "@nikitajs/core/plugins/tools/log"; +import toolsPath from "@nikitajs/core/plugins/tools/path"; +import toolsSchema from "@nikitajs/core/plugins/tools/schema"; +import toolsWalk from "@nikitajs/core/plugins/tools/walk"; + +const create = (..._args) => + session(_args, { plugins: [ - require("./plugins/args"), - require("./plugins/metadata/argument_to_config"), - 
require("./plugins/assertions"), - require("./plugins/assertions/exists"), - require("./plugins/conditions"), - require("./plugins/conditions/execute"), - require("./plugins/conditions/exists"), - require("./plugins/conditions/os"), - require("./plugins/global"), - require("./plugins/history"), - require("./plugins/magic_dollar"), - require("./plugins/metadata/debug"), - require("./plugins/metadata/disabled"), - require("./plugins/metadata/execute"), - require("./plugins/metadata/header"), - require("./plugins/metadata/position"), - require("./plugins/metadata/raw"), - require("./plugins/metadata/relax"), - require("./plugins/metadata/retry"), - require("./plugins/metadata/schema"), - require("./plugins/metadata/time"), - require("./plugins/metadata/tmpdir"), - require("./plugins/metadata/uuid"), - require("./plugins/output/logs"), - require("./plugins/output/status"), - require("./plugins/pubsub"), - require("./plugins/ssh"), - require("./plugins/templated"), - require("./plugins/tools/dig"), - require("./plugins/tools/events"), - require("./plugins/tools/find"), - require("./plugins/tools/log"), - require("./plugins/tools/path"), - require("./plugins/tools/schema"), - require("./plugins/tools/walk"), + args, + metadataArgumentToConfig, + assertions, + assertionsExists, + conditions, + conditionsExecute, + conditionsExists, + conditionsOs, + global, + history, + magicDollar, + metadataDebug, + disabled, + metadataExecute, + metadataHeader, + metadataPosition, + metadataRaw, + metadataRelax, + metadataRetry, + metadataSchema, + metadataTime, + metadataTmpdir, + metadataUuid, + outputLogs, + outputStatus, + pubsub, + ssh, + templated, + toolsDig, + toolsEvents, + toolsFind, + toolsLog, + toolsPath, + toolsSchema, + toolsWalk, ], }); -module.exports = new Proxy(create, { +export default new Proxy(create, { get: (_, name) => create()[name], }); diff --git a/packages/core/lib/plugins/args.js b/packages/core/lib/plugins/args.js index 7080b3ccc..4d6670317 100644 --- a/packages/core/lib/plugins/args.js +++ b/packages/core/lib/plugins/args.js @@ -1,23 +1,20 @@ - /* The `args` plugin place the original argument into the action "args" property. 
*/ -const utils = require('../utils'); - // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/args', +export default { + name: "@nikitajs/core/plugins/args", hooks: { - 'nikita:arguments': { - handler: function({args, child}, handler) { + "nikita:arguments": { + handler: function ({ args, child }, handler) { // return handler is args.length is 0 # nikita is called without any args, eg `nikita.call(...)` // Erase all arguments to re-inject them later // return null if args.length is 1 and args[0]?.args if (child?.metadata?.raw_input) { arguments[0].args = [{}]; } - return function() { + return function () { const action = handler.apply(null, arguments); // If raw_input is activated, just pass arguments as is // Always one action since arguments are erased @@ -28,10 +25,10 @@ module.exports = { action.args = args; return action; }; - } + }, }, - 'nikita:normalize': function(action, handler) { - return async function() { + "nikita:normalize": function (action, handler) { + return async function () { // Prevent arguments to move into config by normalize const args = action.args; delete action.args; @@ -39,6 +36,6 @@ module.exports = { action.args = args; return action; }; - } - } + }, + }, }; diff --git a/packages/core/lib/plugins/assertions/index.js b/packages/core/lib/plugins/assertions.js similarity index 92% rename from packages/core/lib/plugins/assertions/index.js rename to packages/core/lib/plugins/assertions.js index 3c5049235..e2eb93cae 100644 --- a/packages/core/lib/plugins/assertions/index.js +++ b/packages/core/lib/plugins/assertions.js @@ -1,6 +1,6 @@ -const session = require('../../session'); -const utils = require('../../utils'); +import session from '@nikitajs/core/session'; +import utils from '@nikitajs/core/utils'; const handlers = { assert: async function(action, error, output) { @@ -65,11 +65,11 @@ const handlers = { } }; -module.exports = { - name: '@nikitajs/core/lib/plugins/assertions', +export default { + name: '@nikitajs/core/plugins/assertions', require: [ - '@nikitajs/core/lib/plugins/metadata/raw', - '@nikitajs/core/lib/plugins/metadata/disabled' + '@nikitajs/core/plugins/metadata/raw', + '@nikitajs/core/plugins/metadata/disabled' ], hooks: { 'nikita:normalize': function(action, handler) { diff --git a/packages/core/lib/plugins/assertions/exists.js b/packages/core/lib/plugins/assertions/exists.js index 45c61e5cf..855fac42b 100644 --- a/packages/core/lib/plugins/assertions/exists.js +++ b/packages/core/lib/plugins/assertions/exists.js @@ -1,15 +1,15 @@ /* -# Plugin `@nikitajs/core/lib/plugins/assertions/exists` +# Plugin `@nikitajs/core/plugins/assertions/exists` Assert that a file exist. The plugin register two action properties, `$assert_exists` and `$unassert_exists`. 
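Besides the import rewrites, every plugin is renamed from a `@nikitajs/core/lib/plugins/...` identifier to the shorter `@nikitajs/core/plugins/...` form, matching files that move out of their `index.js` folders (for example `plugins/assertions/index.js` becomes `plugins/assertions.js`). A minimal plugin skeleton following those conventions; the plugin name and hook body are hypothetical:

```js
// Hypothetical plugin skeleton: a default-exported object with a short
// `@nikitajs/core/plugins/...` name, optional ordering dependencies, and
// handlers keyed by hook name, as used by the plugins in this patch.
export default {
  name: "@nikitajs/core/plugins/example",
  require: ["@nikitajs/core/plugins/metadata/raw"],
  hooks: {
    "nikita:normalize": function (action, handler) {
      return async function () {
        const normalized = await handler.call(null, action);
        // ...adjust the normalized action here...
        return normalized;
      };
    },
  },
};
```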
*/ -const session = require('../../session'); -const utils = require('../../utils'); -const {mutate} = require('mixme'); +import session from '@nikitajs/core/session'; +import utils from '@nikitajs/core/utils'; +import {mutate} from 'mixme'; const handlers = { assert_exists: async function(action) { @@ -52,17 +52,17 @@ const handlers = { } }; -module.exports = { - name: '@nikitajs/core/lib/plugins/assertions/exists', +export default { + name: '@nikitajs/core/plugins/assertions/exists', require: [ - '@nikitajs/core/lib/plugins/metadata/raw', - '@nikitajs/core/lib/plugins/metadata/disabled' + '@nikitajs/core/plugins/metadata/raw', + '@nikitajs/core/plugins/metadata/disabled' ], hooks: { 'nikita:normalize': { // This is hanging, no time for investigation // after: [ - // '@nikitajs/core/lib/plugins/assertions' + // '@nikitajs/core/plugins/assertions' // ] handler: function(action, handler) { // Ventilate assertions properties defined at root diff --git a/packages/core/lib/plugins/conditions/index.js b/packages/core/lib/plugins/conditions.js similarity index 88% rename from packages/core/lib/plugins/conditions/index.js rename to packages/core/lib/plugins/conditions.js index 772e9383d..d0faa12d2 100644 --- a/packages/core/lib/plugins/conditions/index.js +++ b/packages/core/lib/plugins/conditions.js @@ -1,5 +1,5 @@ -const session = require('../../session'); +import session from '@nikitajs/core/session'; const handlers = { if: async function(action) { @@ -84,9 +84,9 @@ const handlers = { } }; -module.exports = { - name: '@nikitajs/core/lib/plugins/conditions', - require: ['@nikitajs/core/lib/plugins/metadata/raw', '@nikitajs/core/lib/plugins/metadata/disabled'], +export default { + name: '@nikitajs/core/plugins/conditions', + require: ['@nikitajs/core/plugins/metadata/raw', '@nikitajs/core/plugins/metadata/disabled'], hooks: { 'nikita:normalize': { handler: function(action, handler) { @@ -96,9 +96,9 @@ module.exports = { let value = action.metadata[property]; if (/^(if|unless)($|_[\w_]+$)/.test(property)) { if (conditions[property]) { - throw Error('CONDITIONS_DUPLICATED_DECLARATION', [ + throw Error("CONDITIONS_DUPLICATED_DECLARATION", [ `Property ${property} is defined multiple times,`, - 'at the root of the action and inside conditions' + "at the root of the action and inside conditions", ]); } if (!Array.isArray(value)) { @@ -116,8 +116,8 @@ module.exports = { } }, 'nikita:action': { - before: '@nikitajs/core/lib/plugins/metadata/disabled', - after: '@nikitajs/core/lib/plugins/templated', + before: '@nikitajs/core/plugins/metadata/disabled', + after: '@nikitajs/core/plugins/templated', handler: async function(action) { let final_run = true; for (const condition in action.conditions) { diff --git a/packages/core/lib/plugins/conditions/execute.js b/packages/core/lib/plugins/conditions/execute.js index be0f8b841..7e8b3a559 100644 --- a/packages/core/lib/plugins/conditions/execute.js +++ b/packages/core/lib/plugins/conditions/execute.js @@ -1,9 +1,7 @@ -const session = require('../../session'); +import session from '@nikitajs/core/session'; -const - -handlers = { +const handlers = { if_execute: async function(action) { let final_run = true; for (const condition of action.conditions.if_execute) { @@ -69,13 +67,13 @@ handlers = { } }; -module.exports = { - name: '@nikitajs/core/lib/plugins/conditions/execute', - require: ['@nikitajs/core/lib/plugins/conditions'], +export default { + name: '@nikitajs/core/plugins/conditions/execute', + require: ['@nikitajs/core/plugins/conditions'], hooks: { 
'nikita:action': { - after: '@nikitajs/core/lib/plugins/conditions', - before: '@nikitajs/core/lib/plugins/metadata/disabled', + after: '@nikitajs/core/plugins/conditions', + before: '@nikitajs/core/plugins/metadata/disabled', handler: async function(action) { let final_run = true; for (const condition in action.conditions) { diff --git a/packages/core/lib/plugins/conditions/exists.js b/packages/core/lib/plugins/conditions/exists.js index bf18805dc..78c7cfc2c 100644 --- a/packages/core/lib/plugins/conditions/exists.js +++ b/packages/core/lib/plugins/conditions/exists.js @@ -1,5 +1,5 @@ -const session = require('../../session'); +import session from '@nikitajs/core/session'; const handlers = { if_exists: async function(action, value) { @@ -47,13 +47,13 @@ const handlers = { } }; -module.exports = { - name: '@nikitajs/core/lib/plugins/conditions/exists', - require: ['@nikitajs/core/lib/plugins/conditions'], +export default { + name: '@nikitajs/core/plugins/conditions/exists', + require: ['@nikitajs/core/plugins/conditions'], hooks: { 'nikita:action': { - after: '@nikitajs/core/lib/plugins/conditions', - before: '@nikitajs/core/lib/plugins/metadata/disabled', + after: '@nikitajs/core/plugins/conditions', + before: '@nikitajs/core/plugins/metadata/disabled', handler: async function(action) { let final_run = true; for (const condition in action.conditions) { diff --git a/packages/core/lib/plugins/conditions/os.js b/packages/core/lib/plugins/conditions/os.js index 97cf6840c..918a545ba 100644 --- a/packages/core/lib/plugins/conditions/os.js +++ b/packages/core/lib/plugins/conditions/os.js @@ -1,7 +1,6 @@ -const session = require('../../session'); -const utils = require('../../utils'); -const exec = require('ssh2-exec'); +import session from '@nikitajs/core/session'; +import utils from '@nikitajs/core/utils'; const handlers = { if_os: async function(action) { @@ -10,7 +9,18 @@ const handlers = { $bastard: true, $parent: action }, async function() { - const {$status, stdout} = await this.execute(utils.os.command); + const { $status, stdout } = await this.execute(utils.os.command).catch( + (error) => { + if (error.exit_code === 2) { + throw utils.error("NIKITA_PLUGIN_OS_UNSUPPORTED_DISTRIB", [ + "your current distribution is not yet listed,", + "please report to us,", + `it name is ${JSON.stringify(error.stdout)}`, + ]); + } + throw error; + } + ); if (!$status) { return final_run = false; } @@ -83,9 +93,18 @@ const handlers = { $bastard: true, $parent: action }, async function() { - const {$status, stdout} = await this.execute({ - command: utils.os.command - }); + const { $status, stdout } = await this.execute(utils.os.command).catch( + (error) => { + if (error.exit_code === 2) { + throw utils.error("NIKITA_PLUGIN_OS_UNSUPPORTED_DISTRIB", [ + "your current distribution is not yet listed,", + "please report to us,", + `it name is ${JSON.stringify(error.stdout)}`, + ]); + } + throw error; + } + ); if (!$status) { return final_run = false; } @@ -148,12 +167,12 @@ const handlers = { } }; -module.exports = { - name: '@nikitajs/core/lib/plugins/conditions/os', - require: ['@nikitajs/core/lib/plugins/conditions'], +export default { + name: '@nikitajs/core/plugins/conditions/os', + require: ['@nikitajs/core/plugins/conditions'], hooks: { 'nikita:normalize': { - after: '@nikitajs/core/lib/plugins/conditions', + after: '@nikitajs/core/plugins/conditions', handler: function(action, handler) { return async function() { var condition, config, i, j, len, len1, ref; @@ -203,8 +222,8 @@ module.exports = { } }, 
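The `exists` flavour is typically used to guard idempotent provisioning steps. A sketch under the assumption that `$if_exists`/`$unless_exists` accept a path (or list of paths) tested on the target host; the file names are made up:

```js
import nikita from "@nikitajs/core";

// Only run the installer when the marker file is absent.
await nikita.execute({
  command: "sh ./install.sh && touch /var/lib/myapp/.installed",
  $unless_exists: "/var/lib/myapp/.installed",
});

// Only reload when the configuration file is present.
await nikita.execute({
  command: "myapp --reload",
  $if_exists: "/etc/myapp/myapp.conf",
});
```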
'nikita:action': { - after: '@nikitajs/core/lib/plugins/conditions', - before: '@nikitajs/core/lib/plugins/metadata/disabled', + after: '@nikitajs/core/plugins/conditions', + before: '@nikitajs/core/plugins/metadata/disabled', handler: async function(action) { var final_run, k, local_run, ref, v; final_run = true; diff --git a/packages/core/lib/plugins/global.js b/packages/core/lib/plugins/global.js index 74782988e..b2b1a15c8 100644 --- a/packages/core/lib/plugins/global.js +++ b/packages/core/lib/plugins/global.js @@ -1,4 +1,3 @@ - /* The `global` plugin look it the parent tree for a "global" configuration. If found, it will merge its value with the current configuration. @@ -11,21 +10,19 @@ daemon if it is not run locally. */ // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/global', - require: [ - '@nikitajs/core/lib/plugins/tools/find' - ], +export default { + name: "@nikitajs/core/plugins/global", + require: ["@nikitajs/core/plugins/tools/find"], hooks: { - 'nikita:action': { - handler: async function(action) { + "nikita:action": { + handler: async function (action) { const global = action.metadata.global; if (!global) { return action; } - action.config[global] = (await action.tools.find(function({config}) { + action.config[global] = await action.tools.find(function ({ config }) { return config[global]; - })); + }); for (const k in action.config[global]) { const v = action.config[global][k]; if (action.config[k] == null) { @@ -34,7 +31,7 @@ module.exports = { } delete action.config[global]; return action; - } - } - } + }, + }, + }, }; diff --git a/packages/core/lib/plugins/history.js b/packages/core/lib/plugins/history.js index af4d53ec1..7c1480513 100644 --- a/packages/core/lib/plugins/history.js +++ b/packages/core/lib/plugins/history.js @@ -1,15 +1,14 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/history` +# Plugin `@nikitajs/core/plugins/history` The history plugin fill the `children`, `siblings`, and `sibling` properties. */ // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/history', +export default { + name: "@nikitajs/core/plugins/history", hooks: { - 'nikita:normalize': function(action) { + "nikita:normalize": function (action) { action.children = []; if (action.siblings == null) { action.siblings = []; @@ -21,7 +20,7 @@ module.exports = { action.sibling = action.siblings.slice(-1)[0]; } }, - 'nikita:result': function({action, error, output}) { + "nikita:result": function ({ action, error, output }) { if (!action.parent) { return; } @@ -35,8 +34,8 @@ module.exports = { metadata: action.metadata, config: action.config, error: error, - output: output + output: output, }); - } - } + }, + }, }; diff --git a/packages/core/lib/plugins/magic_dollar.js b/packages/core/lib/plugins/magic_dollar.js index a39e5471d..be222537e 100644 --- a/packages/core/lib/plugins/magic_dollar.js +++ b/packages/core/lib/plugins/magic_dollar.js @@ -1,34 +1,30 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/magic_dollar` +# Plugin `@nikitajs/core/plugins/magic_dollar` The `magic_dollar` plugin extract all variables starting with a dollar sign. 
*/ -// Dependencies -const {is_object_literal} = require('mixme'); - // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/magic_dollar', +export default { + name: "@nikitajs/core/plugins/magic_dollar", hooks: { - 'nikita:normalize': { - handler: function(action) { + "nikita:normalize": { + handler: function (action) { const results = []; for (const k in action) { const v = action[k]; - if (k[0] !== '$') { + if (k[0] !== "$") { continue; } const prop = k.substr(1); switch (prop) { - case 'handler': + case "handler": action.handler = v; break; - case 'parent': + case "parent": action.parent = v; break; - case 'scheduler': + case "scheduler": action.scheduler = v; break; default: @@ -37,7 +33,7 @@ module.exports = { results.push(delete action[k]); } return results; - } - } - } + }, + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/argument_to_config.js b/packages/core/lib/plugins/metadata/argument_to_config.js index 928098fa3..a6d4da479 100644 --- a/packages/core/lib/plugins/metadata/argument_to_config.js +++ b/packages/core/lib/plugins/metadata/argument_to_config.js @@ -1,16 +1,16 @@ /* -# Plugin `@nikitajs/core/lib/plugins/metadata/argument_to_config` +# Plugin `@nikitajs/core/plugins/metadata/argument_to_config` The `argument` plugin map an argument which is not an object into a configuration property. */ // Dependencies -const {mutate} = require('mixme'); +import {mutate} from 'mixme'; // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/argument_to_config', +export default { + name: '@nikitajs/core/plugins/metadata/argument_to_config', hooks: { 'nikita:schema': function({schema}) { mutate(schema.definitions.metadata.properties, { @@ -21,7 +21,7 @@ module.exports = { }); }, 'nikita:action': { - before: ['@nikitajs/core/lib/plugins/metadata/schema'], + before: ['@nikitajs/core/plugins/metadata/schema'], handler: function(action) { if (action.metadata.argument_to_config && action.config[action.metadata.argument_to_config] === undefined) { action.config[action.metadata.argument_to_config] = action.metadata.argument; diff --git a/packages/core/lib/plugins/metadata/audit.js b/packages/core/lib/plugins/metadata/audit.js new file mode 100644 index 000000000..0b1b4437f --- /dev/null +++ b/packages/core/lib/plugins/metadata/audit.js @@ -0,0 +1,193 @@ +/** + * # Plugin `@nikitajs/core/plugins/metadata/debug` + * + * Print the time execution of the child actions. + * + */ + +// Dependencies +import stream from "node:stream"; +import dedent from "dedent"; +import pad from "pad"; +import { mutate } from "mixme"; +import { string } from "@nikitajs/core/utils" + +// Utils +const chars = { + horizontal: "─", + upper_left: "┌", + vertical: "│", + vertical_right: "├", +}; +const branches = (record) => { + const depth = record.depth - record.rootDepth; + const position = record.position.slice(record.rootDepth+1) + const branches = []; + for (let i = 0; i < depth - (record.type ? 0 : 1); i++) { + if (position[i] === 0) { + branches.push(" "); + } else { + branches.push(`${chars.vertical} `); + } + } + return branches.join(''); +} +const leaf = (record) => { + return `${record.index === 0 ? chars.upper_left : chars.vertical_right}${chars.horizontal} `; +} +const bullet = (record) => { + return `- `; +} +const print_log = (ws, record) => { + const depth = record.depth - record.rootDepth; + console.log(record, '>', branches(record)) + let msg = + typeof record.message === 'string' + ? record.message.trim() + : typeof record.message === 'number' + ? 
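Taken together, the `magic_dollar` and `argument_to_config` plugins are what make the terse calling convention work: `$`-prefixed properties are lifted into metadata and a bare argument is mapped onto a named configuration property. A sketch assuming `execute` declares `argument_to_config: 'command'`, which is how the string form is used elsewhere in this patch:

```js
import nikita from "@nikitajs/core";

// Long form: everything is spelled out; `$header` ends up in `metadata.header`.
await nikita.execute({
  command: "hostname",
  $header: "Print hostname",
});

// Short form: the string argument is mapped to `config.command`
// by the `argument_to_config` plugin.
await nikita.execute("hostname");
```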
record.message + : record.message?.toString != null + ? record.message.toString().trim() + : JSON.stringify(record.message); + const elements = [ + pad(`[${record.level}]`, 8), + depth > 0 && branches(record), + depth > 0 && leaf(record), + // bullet(record), + ' ', + msg, + ].filter(Boolean) + if(ws.isTTY){ + let out = elements.join('') + out = (function() { + switch (record.type) { + case 'stdin': + return `\x1b[33m${out}\x1b[39m`; + case 'stdout_stream': + return `\x1b[36m${out}\x1b[39m`; + case 'stderr_stream': + return `\x1b[35m${out}\x1b[39m`; + default: + return `\x1b[32m${out}\x1b[39m`; + } + })(); + ws.write(`${out}\n`) + }else{ + let out = elements.join('') + ws.write(`${out}\n`); + } +} +const print = (ws, record) => { + const depth = record.depth - record.rootDepth; + const elements = [ + pad("[AUDIT]", 8), + depth > 0 && branches(record), + depth > 0 && leaf(record), + record.name, + ].filter(Boolean); + if(ws.isTTY){ + let out = elements.join(''); + out += pad(ws.columns - msg.length, record.time); + out = `\x1b[33m${msg}\x1b[39m`; + ws.write(`${out}\n`) + }else{ + elements.push(' ', record.time); + let out = elements.join(''); + ws.write(`${out}\n`); + } +} + +// Plugin +export default { + name: "@nikitajs/core/plugins/metadata/audit", + require: "@nikitajs/core/plugins/tools/log", + hooks: { + "nikita:schema": function ({ schema }) { + mutate(schema.definitions.metadata.properties, { + audit: { + oneOf: [ + { + type: "string", + enum: ["stdout", "stderr"], + }, + { + type: "boolean", + }, + { + instanceof: "stream.Writable", + }, + ], + description: dedent` + Print the time execution of the child actions. + `, + }, + }); + }, + "nikita:action": { + after: ["@nikitajs/core/plugins/metadata/schema"], + handler: function (action) { + if (!action.metadata.audit) { + return; + } + // Print child actions + let audit = action.metadata.audit; + const rootDepth = action.metadata.depth + audit = action.metadata.audit = { + ws: + audit === "stdout" + ? process.stdout + : audit === "stderr" + ? process.stderr + : audit instanceof stream.Writable + ? 
audit + : process.stderr, + listener_end: function ({ action }) { + print(audit.ws, { + name: action.metadata.namespace?.join('.') || action.module, + depth: action.metadata.depth, + index: action.metadata.index, + position: action.metadata.position, + rootDepth: rootDepth, + time: string.print_time(action.metadata.time_end - action.metadata.time_start), + }, action); + }, + listener: function(log) { + print_log(audit.ws, { + name: log.namespace?.join('.') || log.module, + depth: log.depth, + index: log.index, + level: log.level, + message: log.message, + position: log.position, + rootDepth: rootDepth, + time: undefined, + }); + }, + }; + action.tools.events.addListener("nikita:action:end", audit.listener_end); + action.tools.events.addListener('text', audit.listener); + action.tools.events.addListener('stdin', audit.listener); + action.tools.events.addListener('stdout_stream', audit.listener); + action.tools.events.addListener('stderr_stream', audit.listener); + }, + }, + "nikita:result": { + after: "@nikitajs/core/plugins/metadata/time", + handler: function ({ action }) { + const audit = action.metadata.audit; + if (!(audit && audit.listener)) { + return; + } + print(audit.ws, { + name: action.metadata.namespace?.join('.') || action.module, + index: action.metadata.index, + depth: action.metadata.depth, + position: [], + rootDepth: action.metadata.depth, + time: string.print_time(action.metadata.time_end - action.metadata.time_start), + }, action); + action.tools.events.removeListener("nikita:action:end", audit.listener_end); + }, + }, + }, +}; diff --git a/packages/core/lib/plugins/metadata/debug.js b/packages/core/lib/plugins/metadata/debug.js index 9d12cc811..e94dec549 100644 --- a/packages/core/lib/plugins/metadata/debug.js +++ b/packages/core/lib/plugins/metadata/debug.js @@ -1,6 +1,6 @@ /* -# Plugin `@nikitajs/core/lib/plugins/metadata/debug` +# Plugin `@nikitajs/core/plugins/metadata/debug` Print log information to the console. @@ -10,15 +10,14 @@ TODO: detect/force isTTY */ // Dependencies -const dedent = require('dedent'); -const utils = require('../../utils'); -const stream = require('stream'); -const {mutate} = require('mixme'); +import dedent from 'dedent'; +import stream from 'stream'; +import {mutate} from 'mixme'; // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/debug', - require: '@nikitajs/core/lib/plugins/tools/log', +export default { + name: '@nikitajs/core/plugins/metadata/debug', + require: '@nikitajs/core/plugins/tools/log', hooks: { 'nikita:schema': function({schema}) { mutate(schema.definitions.metadata.properties, { @@ -44,7 +43,7 @@ module.exports = { }); }, 'nikita:action': { - after: ['@nikitajs/core/lib/plugins/metadata/schema'], + after: ['@nikitajs/core/plugins/metadata/schema'], handler: function(action) { if (!action.metadata.debug) { return; @@ -53,12 +52,12 @@ module.exports = { debug = action.metadata.debug = { ws: debug === 'stdout' - ? action.metadata.debug.ws = process.stdout + ? process.stdout : debug === 'stderr' - ? action.metadata.debug.ws = process.stderr + ? process.stderr : debug instanceof stream.Writable - ? action.metadata.debug.ws = debug - : action.metadata.debug.ws = process.stderr, + ? 
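Because `audit` is a new metadata property introduced by this patch, a usage sketch may be useful. It assumes the values accepted by the schema above (`"stdout"`, `"stderr"`, a boolean or a writable stream) and that the listeners print one line per child action together with its execution time:

```js
import nikita from "@nikitajs/core";

await nikita(
  {
    // Print a tree of the child actions with their execution time.
    $audit: "stdout",
  },
  async function () {
    await this.execute("sleep 1");
    await this.fs.mkdir("/tmp/audited");
  }
);
```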
debug + : process.stderr, listener: function(log) { if(['stdout_stream', 'stderr_stream'].includes(log.type) && log.message == null){ return diff --git a/packages/core/lib/plugins/metadata/disabled.js b/packages/core/lib/plugins/metadata/disabled.js index e6f925bc0..ea44b9ab2 100644 --- a/packages/core/lib/plugins/metadata/disabled.js +++ b/packages/core/lib/plugins/metadata/disabled.js @@ -1,6 +1,5 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/metadata/disabled` +# Plugin `@nikitajs/core/plugins/metadata/disabled` Desactivate the execution of an action. @@ -9,25 +8,28 @@ When a plugin a disabled, chances are that not all its property where passed correctly and we don't want schema validation to throw an error in such cases */ -const dedent = require('dedent') -const {mutate} = require('mixme'); -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/disabled', +// Dependencies +import dedent from "dedent"; +import { mutate } from "mixme"; + +// Plugin +export default { + name: "@nikitajs/core/plugins/metadata/disabled", hooks: { - 'nikita:schema': function({schema}) { + "nikita:schema": function ({ schema }) { mutate(schema.definitions.metadata.properties, { disabled: { - type: 'boolean', + type: "boolean", description: dedent` Disable the execution of the current action and consequently the execution of its child actions. `, - default: false - } + default: false, + }, }); }, - 'nikita:action': function(action, handler) { + "nikita:action": function (action, handler) { if (action.metadata.disabled == null) { action.metadata.disabled = false; } @@ -36,6 +38,6 @@ module.exports = { } else { return handler; } - } - } + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/execute.js b/packages/core/lib/plugins/metadata/execute.js index 10197bd27..8a7df634c 100644 --- a/packages/core/lib/plugins/metadata/execute.js +++ b/packages/core/lib/plugins/metadata/execute.js @@ -1,75 +1,62 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/execute` +# Plugin `@nikitajs/core/plugins/execute` Convert the execute configuration properties into metadata which are inherited from parent actions. 
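The `debug` and `disabled` switches handled above are often combined while developing a script. A hedged sketch, assuming `$debug: true` streams log entries to `stderr` (the fallback in the handler) and `$disabled: true` skips the action together with its children:

```js
import nikita from "@nikitajs/core";

// Stream every log entry of this action to stderr while debugging.
await nikita.execute({
  command: "cat /etc/os-release",
  $debug: true,
});

// Temporarily switch a step off without deleting it.
await nikita.fs.remove({
  target: "/tmp/dangerous-cleanup",
  $disabled: true,
});
```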
*/ -const {merge, mutate} = require('mixme'); +import { merge, mutate } from "mixme"; -module.exports = { - name: '@nikitajs/core/lib/plugins/execute', +export default { + name: "@nikitajs/core/plugins/execute", require: [ - '@nikitajs/core/lib/plugins/tools/find', - '@nikitajs/core/lib/plugins/tools/walk' + "@nikitajs/core/plugins/tools/find", + "@nikitajs/core/plugins/tools/walk", ], hooks: { - 'nikita:schema': function({schema}) { + "nikita:schema": function ({ schema }) { mutate(schema.definitions.metadata.properties, { sudo: { - type: 'boolean', - description: `Run the action with as the superuser.` - } + type: "boolean", + description: `Run the action with as the superuser.`, + }, }); }, - 'nikita:action': { - handler: async function({ - config, - metadata, - tools: {find, walk} - }) { - if (metadata.module !== '@nikitajs/core/lib/actions/execute') { + "nikita:action": { + handler: async function ({ config, metadata, tools: { find, walk } }) { + if (metadata.module !== "@nikitajs/core/actions/execute") { return; } if (config.arch_chroot == null) { config.arch_chroot = await find( - ({metadata}) => metadata.arch_chroot + ({ metadata }) => metadata.arch_chroot ); } if (config.arch_chroot_rootdir == null) { config.arch_chroot_rootdir = await find( - ({metadata}) => metadata.arch_chroot_rootdir + ({ metadata }) => metadata.arch_chroot_rootdir ); } if (config.bash == null) { - config.bash = await find( - ({metadata}) => metadata.bash - ); + config.bash = await find(({ metadata }) => metadata.bash); } if (config.dry == null) { - config.dry = await find( - ({metadata}) => metadata.dry - ); + config.dry = await find(({ metadata }) => metadata.dry); } const env = merge( config.env, - ...(await walk(({metadata}) => metadata.env)) + ...(await walk(({ metadata }) => metadata.env)) ); if (Object.keys(env).length) { config.env = env; } if (config.env_export == null) { - config.env_export = await find( - ({metadata}) => metadata.env_export - ); + config.env_export = await find(({ metadata }) => metadata.env_export); } if (config.sudo == null) { - config.sudo = await find( - ({metadata}) => metadata.sudo - ); + config.sudo = await find(({ metadata }) => metadata.sudo); } - } - } - } + }, + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/header.js b/packages/core/lib/plugins/metadata/header.js index 701e7d92d..fceab5e82 100644 --- a/packages/core/lib/plugins/metadata/header.js +++ b/packages/core/lib/plugins/metadata/header.js @@ -1,15 +1,16 @@ /* -# Plugin `@nikitajs/core/lib/plugins/metadata/header` +# Plugin `@nikitajs/core/plugins/metadata/header` The `header` plugin validate the metadata `header` property against the schema. 
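The inheritance implemented above is easiest to see from user code: metadata set on a parent action is picked up by every `execute` child through `tools.find`/`tools.walk`. A sketch assuming `$sudo` and `$env` behave as the handler suggests; the commands are illustrative:

```js
import nikita from "@nikitajs/core";

await nikita(
  {
    // Inherited by every child `execute` action.
    $sudo: true,
    $env: { LC_ALL: "C" },
  },
  async function () {
    // Both commands run under sudo with the merged environment.
    await this.execute("apt-get update");
    await this.execute("apt-get install -y htop");
  }
);
```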
*/ -var mutate; -({mutate} = require('mixme')); +// Dependencies +import {mutate} from 'mixme'; -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/header', +// Plugin +export default { + name: '@nikitajs/core/plugins/metadata/header', hooks: { 'nikita:schema': function({schema}) { mutate(schema.definitions.metadata.properties, { diff --git a/packages/core/lib/plugins/metadata/position.js b/packages/core/lib/plugins/metadata/position.js index 092ffeab4..bda1c8661 100644 --- a/packages/core/lib/plugins/metadata/position.js +++ b/packages/core/lib/plugins/metadata/position.js @@ -1,12 +1,20 @@ -const dedent = require('dedent'); -const {mutate} = require('mixme'); -const utils = require('../../utils'); +/* +# Plugin `@nikitajs/core/plugins/metadata/position` -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/position', +Insert the metadata properties `depth`, `index` and `position` to each action. +*/ + +// Dependencies +import dedent from 'dedent'; +import {mutate} from 'mixme'; +import utils from '@nikitajs/core/utils'; + +// Plugin +export default { + name: '@nikitajs/core/plugins/metadata/position', require: [ - '@nikitajs/core/lib/plugins/history' + '@nikitajs/core/plugins/history' ], hooks: { 'nikita:schema': function({schema}) { @@ -44,7 +52,7 @@ module.exports = { }); }, 'nikita:normalize': { - after: '@nikitajs/core/lib/plugins/history', + after: '@nikitajs/core/plugins/history', handler: function(action) { action.metadata.depth = action.parent ? action.parent.metadata.depth + 1 : 0; // plugins are not activated in the root session with {depth: 0} diff --git a/packages/core/lib/plugins/metadata/raw.js b/packages/core/lib/plugins/metadata/raw.js index 11ac5b180..5dedd03ac 100644 --- a/packages/core/lib/plugins/metadata/raw.js +++ b/packages/core/lib/plugins/metadata/raw.js @@ -1,52 +1,52 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/metadata/raw` +# Plugin `@nikitajs/core/plugins/metadata/raw` Affect the normalization of input and output properties. */ // Dependencies -const dedent = require('dedent'); -const {mutate} = require('mixme'); +import dedent from "dedent"; +import { mutate } from "mixme"; -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/raw', +// Plugin +export default { + name: "@nikitajs/core/plugins/metadata/raw", hooks: { - 'nikita:schema': function({schema}) { + "nikita:schema": function ({ schema }) { mutate(schema.definitions.metadata.properties, { raw: { - type: 'boolean', + type: "boolean", description: dedent` Indicates the level number of the action in the Nikita session tree. `, default: false, - readOnly: true + readOnly: true, }, raw_input: { - type: 'boolean', + type: "boolean", description: dedent` Indicates the index of an action relative to its sibling actions in the Nikita session tree. `, - readOnly: true + readOnly: true, }, raw_output: { - type: 'boolean', + type: "boolean", description: dedent` Indicates the position of the action relative to its parent and sibling action. It is unique to each action. 
`, - readOnly: true - } + readOnly: true, + }, }); }, - 'nikita:registry:normalize': { - handler: function(action) { + "nikita:registry:normalize": { + handler: function (action) { if (action.metadata == null) { action.metadata = {}; } - const wasBoolean = typeof action.metadata.raw === 'boolean'; + const wasBoolean = typeof action.metadata.raw === "boolean"; if (action.metadata.raw == null) { action.metadata.raw = false; } @@ -58,11 +58,11 @@ module.exports = { action.metadata.raw_output = action.metadata.raw; } } - } + }, }, - 'nikita:action': { - handler: function(action) { - const wasBoolean = typeof action.metadata.raw === 'boolean'; + "nikita:action": { + handler: function (action) { + const wasBoolean = typeof action.metadata.raw === "boolean"; if (action.metadata.raw == null) { action.metadata.raw = false; } @@ -74,7 +74,7 @@ module.exports = { action.metadata.raw_output = action.metadata.raw; } } - } - } - } + }, + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/relax.js b/packages/core/lib/plugins/metadata/relax.js index 1b477f803..c933e2dac 100644 --- a/packages/core/lib/plugins/metadata/relax.js +++ b/packages/core/lib/plugins/metadata/relax.js @@ -1,32 +1,39 @@ +import utils from "@nikitajs/core/utils"; -const utils = require('../../utils'); - -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/relax', +export default { + name: "@nikitajs/core/plugins/metadata/relax", hooks: { - 'nikita:action': function(action, handler) { + "nikita:action": function (action, handler) { if (action.metadata.relax == null) { action.metadata.relax = false; } - if (typeof action.metadata.relax === 'string' || action.metadata.relax instanceof RegExp) { + if ( + typeof action.metadata.relax === "string" || + action.metadata.relax instanceof RegExp + ) { action.metadata.relax = [action.metadata.relax]; } - if (!(typeof action.metadata.relax === 'boolean' || action.metadata.relax instanceof Array)) { - throw utils.error('METADATA_RELAX_INVALID_VALUE', [ + if ( + !( + typeof action.metadata.relax === "boolean" || + action.metadata.relax instanceof Array + ) + ) { + throw utils.error("METADATA_RELAX_INVALID_VALUE", [ "configuration `relax` expects a boolean, string, array or regexp", - `value, got ${JSON.stringify(action.metadata.relax)}.` + `value, got ${JSON.stringify(action.metadata.relax)}.`, ]); } return handler; }, - 'nikita:result': function(args) { + "nikita:result": function (args) { if (!args.action.metadata.relax) { return; } if (!args.error) { return; } - if (args.error.code === 'METADATA_RELAX_INVALID_VALUE') { + if (args.error.code === "METADATA_RELAX_INVALID_VALUE") { return; } if ( @@ -40,6 +47,6 @@ module.exports = { args.output.error = args.error; args.error = undefined; } - } - } + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/retry.js b/packages/core/lib/plugins/metadata/retry.js index a28979c6e..a1b9808d7 100644 --- a/packages/core/lib/plugins/metadata/retry.js +++ b/packages/core/lib/plugins/metadata/retry.js @@ -1,15 +1,15 @@ /* -# @nikitajs/core/lib/plugins/metadata/retry +# @nikitajs/core/plugins/metadata/retry Reschedule the execution of an action on error. 
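For completeness, here is how the `relax` behaviour reads from calling code. The sketch assumes the boolean form, where a failure is reported on the action output (`output.error`) rather than thrown; string, regexp and array forms are matched against the error, as the normalization above indicates:

```js
import nikita from "@nikitajs/core";

// With `$relax`, a failing command does not reject the promise chain.
const { error } = await nikita.execute({
  command: "exit 1",
  $relax: true,
});
if (error) {
  console.warn("command failed but the script continues:", error.message);
}
```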
*/ -const {merge} = require('mixme'); -const utils = require('../../utils'); +import {merge} from 'mixme'; +import utils from '@nikitajs/core/utils'; -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/retry', +export default { + name: '@nikitajs/core/plugins/metadata/retry', hooks: { 'nikita:action': function(action, handler) { if (action.metadata.attempt == null) { diff --git a/packages/core/lib/plugins/metadata/schema.js b/packages/core/lib/plugins/metadata/schema.js index 725c61a54..f41c7e380 100644 --- a/packages/core/lib/plugins/metadata/schema.js +++ b/packages/core/lib/plugins/metadata/schema.js @@ -1,6 +1,6 @@ /* -# Plugin `@nikitajs/core/lib/plugins/metadata/schema` +# Plugin `@nikitajs/core/plugins/metadata/schema` The plugin enrich the config object with default values defined in the JSON schema. Thus, it mst be defined after every module which modify the config @@ -8,14 +8,14 @@ object. */ // Dependencies -const dedent = require('dedent'); -const {mutate} = require('mixme'); -const utils = require('../../utils'); +import dedent from 'dedent'; +import {mutate} from 'mixme'; -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/schema', +// Plugin +export default { + name: '@nikitajs/core/plugins/metadata/schema', require: [ - '@nikitajs/core/lib/plugins/tools/schema' + '@nikitajs/core/plugins/tools/schema' ], hooks: { 'nikita:schema': function({schema}) { @@ -31,8 +31,8 @@ module.exports = { }, 'nikita:action': { after: [ - '@nikitajs/core/lib/plugins/global', - '@nikitajs/core/lib/plugins/metadata/disabled' + '@nikitajs/core/plugins/global', + '@nikitajs/core/plugins/metadata/disabled' ], handler: async function(action) { if (action.metadata.schema === false) { diff --git a/packages/core/lib/plugins/metadata/time.js b/packages/core/lib/plugins/metadata/time.js index 2dd490e79..81d212f6d 100644 --- a/packages/core/lib/plugins/metadata/time.js +++ b/packages/core/lib/plugins/metadata/time.js @@ -1,22 +1,23 @@ /* -# Plugin `@nikitajs/core/lib/plugins/metadata/time` +# Plugin `@nikitajs/core/plugins/metadata/time` The time plugin create two metadata properties, `time_start` and `time_end`. 
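The retry and schema plugins both expose a single switch to callers. A hedged sketch, assuming `$retry` re-runs a failing action a given number of times (the handler tracks the current `attempt` in metadata) and `$schema: false` bypasses configuration validation, as the early return in the schema handler indicates:

```js
import nikita from "@nikitajs/core";

// Retry a flaky download a few times before giving up.
await nikita.execute({
  command: "curl -fsSL https://example.com/artifact.tgz -o /tmp/artifact.tgz",
  $retry: 3,
});

// Skip JSON schema validation for this one action.
await nikita.execute({
  command: "true",
  $schema: false,
});
```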
*/ -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/time', +// Plugin +export default { + name: "@nikitajs/core/plugins/metadata/time", hooks: { - 'nikita:action': { - handler: function(action) { + "nikita:action": { + handler: function (action) { action.metadata.time_start = Date.now(); - } + }, }, - 'nikita:result': { - before: '@nikitajs/core/lib/plugins/history', - handler: function({action}) { + "nikita:result": { + before: "@nikitajs/core/plugins/history", + handler: function ({ action }) { action.metadata.time_end = Date.now(); - } - } - } + }, + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/tmpdir.js b/packages/core/lib/plugins/metadata/tmpdir.js index bea1c8dcd..445fb6829 100644 --- a/packages/core/lib/plugins/metadata/tmpdir.js +++ b/packages/core/lib/plugins/metadata/tmpdir.js @@ -1,17 +1,14 @@ +import { is_object_literal } from "mixme"; +import utils from "@nikitajs/core/utils"; +import os from "os"; +import fs from "ssh2-fs"; +import exec from "ssh2-exec/promises"; -const {is_object_literal} = require('mixme'); -const {mutate} = require('mixme'); -const utils = require('../../utils'); -const os = require('os'); -const process = require('process'); -const fs = require('ssh2-fs'); -const exec = require('ssh2-exec/promise'); - -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/tmpdir', +export default { + name: "@nikitajs/core/plugins/metadata/tmpdir", require: [ - '@nikitajs/core/lib/plugins/tools/find', - '@nikitajs/core/lib/plugins/tools/path' + "@nikitajs/core/plugins/tools/find", + "@nikitajs/core/plugins/tools/path", ], hooks: { // 'nikita:schema': ({schema}) -> @@ -26,15 +23,28 @@ module.exports = { // Creates a temporary directory for the duration of the action // execution. // ''' - 'nikita:action': { - before: ['@nikitajs/core/lib/plugins/templated'], - after: ['@nikitajs/core/lib/plugins/execute', '@nikitajs/core/lib/plugins/ssh', '@nikitajs/core/lib/plugins/tools/path', '@nikitajs/core/lib/plugins/metadata/uuid'], + "nikita:action": { + before: ["@nikitajs/core/plugins/templated"], + after: [ + "@nikitajs/core/plugins/execute", + "@nikitajs/core/plugins/ssh", + "@nikitajs/core/plugins/tools/path", + "@nikitajs/core/plugins/metadata/uuid", + ], // Probably related to pb above - // '@nikitajs/core/lib/plugins/metadata/schema' - handler: async function(action) { - const {config, metadata, tools} = action; - if (!['boolean', 'function', 'string', 'undefined'].includes(typeof metadata.tmpdir) && !is_object_literal(metadata.tmpdir)) { - throw utils.error('METADATA_TMPDIR_INVALID', ['the "tmpdir" metadata value must be a boolean, a function, an object or a string,', `got ${JSON.stringify(metadata.tmpdir)}`]); + // '@nikitajs/core/plugins/metadata/schema' + handler: async function (action) { + const { config, metadata, tools } = action; + if ( + !["boolean", "function", "string", "undefined"].includes( + typeof metadata.tmpdir + ) && + !is_object_literal(metadata.tmpdir) + ) { + throw utils.error("METADATA_TMPDIR_INVALID", [ + 'the "tmpdir" metadata value must be a boolean, a function, an object or a string,', + `got ${JSON.stringify(metadata.tmpdir)}`, + ]); } // tmpdir is explicit, it must be defined to be available as a metadata // wether we switch with sudo or ssh, if not defined, there is nothing to do @@ -42,37 +52,40 @@ module.exports = { return; } // SSH connection extraction - const ssh = config.ssh === false - ? undefined - : await tools.find((action) => action.ssh); + const ssh = + config.ssh === false + ? 
undefined + : await tools.find((action) => action.ssh); // Sudo extraction - const sudo = await tools.find(({metadata}) => metadata.sudo); + const sudo = await tools.find(({ metadata }) => metadata.sudo); // Generate temporary location - const os_tmpdir = ssh ? '/tmp' : os.tmpdir(); + const os_tmpdir = ssh ? "/tmp" : os.tmpdir(); const ssh_hash = ssh ? utils.ssh.hash(ssh) : null; - const tmp_hash = utils.string.hash(JSON.stringify({ - ssh_hash: ssh_hash, - sudo: sudo, - uuid: metadata.uuid - })); - const tmpdir_info = await (async function() { + const tmp_hash = utils.string.hash( + JSON.stringify({ + ssh_hash: ssh_hash, + sudo: sudo, + uuid: metadata.uuid, + }) + ); + const tmpdir_info = await (async function () { switch (typeof metadata.tmpdir) { - case 'string': + case "string": return { - target: metadata.tmpdir + target: metadata.tmpdir, }; - case 'boolean': + case "boolean": return { - target: 'nikita-' + tmp_hash, - hash: tmp_hash + target: "nikita-" + tmp_hash, + hash: tmp_hash, }; - case 'function': - return (await metadata.tmpdir.call(null, { + case "function": + return await metadata.tmpdir.call(null, { action: action, os_tmpdir: os_tmpdir, - tmpdir: 'nikita-' + tmp_hash - })); - case 'object': + tmpdir: "nikita-" + tmp_hash, + }); + case "object": // metadata.tmpdir.target ?= 'nikita-'+tmp_hash return metadata.tmpdir; default: @@ -96,18 +109,20 @@ module.exports = { tmpdir_info.hash = utils.string.hash(JSON.stringify(tmpdir_info)); } if (tmpdir_info.target == null) { - tmpdir_info.target = 'nikita-' + tmpdir_info.hash; + tmpdir_info.target = "nikita-" + tmpdir_info.hash; } tmpdir_info.target = tools.path.resolve(os_tmpdir, tmpdir_info.target); metadata.tmpdir = tmpdir_info.target; - const exists = action.parent && await tools.find(action.parent, function({metadata}) { - if (!metadata.tmpdir) { - return; - } - if (tmpdir_info.hash === metadata.tmpdir_info?.hash) { - return true; - } - }); + const exists = + action.parent && + (await tools.find(action.parent, function ({ metadata }) { + if (!metadata.tmpdir) { + return; + } + if (tmpdir_info.hash === metadata.tmpdir_info?.hash) { + return true; + } + })); if (exists) { return; } @@ -118,37 +133,41 @@ module.exports = { } metadata.tmpdir_info = tmpdir_info; } catch (error) { - if (error.code !== 'EEXIST') { + if (error.code !== "EEXIST") { throw error; } } - } + }, }, - 'nikita:result': { - before: '@nikitajs/core/lib/plugins/ssh', - handler: async function({action}) { - const {config, metadata, tools} = action; + "nikita:result": { + before: "@nikitajs/core/plugins/ssh", + handler: async function ({ action }) { + const { config, metadata, tools } = action; // Value of tmpdir could still be true if there was an error in // one of the on_action hook, such as a invalid schema validation - if (typeof metadata.tmpdir !== 'string') { + if (typeof metadata.tmpdir !== "string") { return; } if (!metadata.tmpdir_info) { return; } - if (await tools.find(({metadata}) => metadata.dirty)) { + if (await tools.find(({ metadata }) => metadata.dirty)) { return; } // SSH connection extraction - const ssh = config.ssh === false - ? undefined - : await tools.find(action, (action) => action.ssh); + const ssh = + config.ssh === false + ? undefined + : await tools.find(action, (action) => action.ssh); // Temporary directory decommissioning - await exec(ssh, [ - metadata.tmpdir_info.sudo ? 'sudo' : undefined, - `rm -r '${metadata.tmpdir}'` - ].join(' ')); - } - } - } + await exec( + ssh, + [ + metadata.tmpdir_info.sudo ? 
"sudo" : undefined, + `rm -r '${metadata.tmpdir}'`, + ].join(" ") + ); + }, + }, + }, }; diff --git a/packages/core/lib/plugins/metadata/uuid.js b/packages/core/lib/plugins/metadata/uuid.js index 823d6fd26..770239814 100644 --- a/packages/core/lib/plugins/metadata/uuid.js +++ b/packages/core/lib/plugins/metadata/uuid.js @@ -1,13 +1,16 @@ /* -# Plugin '@nikitajs/core/lib/plugins/metadata/uuid' +# Plugin '@nikitajs/core/plugins/metadata/uuid' Identify each action with a unique identifier. */ -const {v4: uuid} = require('uuid'); -module.exports = { - name: '@nikitajs/core/lib/plugins/metadata/uuid', +// Dependencies +import {v4 as uuid} from 'uuid'; + +// Plugin +export default { + name: '@nikitajs/core/plugins/metadata/uuid', hooks: { 'nikita:action': { handler: function(action) { diff --git a/packages/core/lib/plugins/output/logs.js b/packages/core/lib/plugins/output/logs.js index 27a36a2d7..425301313 100644 --- a/packages/core/lib/plugins/output/logs.js +++ b/packages/core/lib/plugins/output/logs.js @@ -1,23 +1,23 @@ /* -# Plugin `@nikitajs/core/lib/plugins/output/logs` +# Plugin `@nikitajs/core/plugins/output/logs` Return events emitted inside the action. */ -const {is_object_literal} = require('mixme'); -const stackTrace = require('stack-trace'); -const path = require('path'); +import {is_object_literal} from 'mixme'; +import stackTrace from 'stack-trace'; +import path from 'node:path'; -module.exports = { - name: '@nikitajs/core/lib/plugins/output/logs', +export default { + name: '@nikitajs/core/plugins/output/logs', require: [ - '@nikitajs/core/lib/plugins/tools/log', - '@nikitajs/core/lib/plugins/output/status', - '@nikitajs/core/lib/plugins/metadata/raw' + '@nikitajs/core/plugins/tools/log', + '@nikitajs/core/plugins/output/status', + '@nikitajs/core/plugins/metadata/raw' ], hooks: { 'nikita:action': { - after: '@nikitajs/core/lib/plugins/tools/log', + after: '@nikitajs/core/plugins/tools/log', handler: function(action) { action.state.logs = []; action.tools.log = (function(fn) { @@ -41,7 +41,7 @@ module.exports = { } }, 'nikita:result': { - after: '@nikitajs/core/lib/plugins/output/status', + after: '@nikitajs/core/plugins/output/status', handler: function({action, output}, handler) { if (action.metadata.raw_output) { return handler; diff --git a/packages/core/lib/plugins/output/status.js b/packages/core/lib/plugins/output/status.js index abe71e4c8..05b09e508 100644 --- a/packages/core/lib/plugins/output/status.js +++ b/packages/core/lib/plugins/output/status.js @@ -1,15 +1,15 @@ -const {is_object, is_object_literal} = require('mixme'); -const utils = require('../../utils'); +import {is_object, is_object_literal} from 'mixme'; +import utils from '@nikitajs/core/utils'; -module.exports = { - name: '@nikitajs/core/lib/plugins/output/status', - require: ['@nikitajs/core/lib/plugins/history'], +export default { + name: '@nikitajs/core/plugins/output/status', + require: ['@nikitajs/core/plugins/history'], recommand: [ // Status is set to `false` when action is disabled - '@nikitajs/core/lib/plugins/metadata/disabled', + '@nikitajs/core/plugins/metadata/disabled', // Honors raw_output if present - '@nikitajs/core/lib/plugins/metadata/raw' + '@nikitajs/core/plugins/metadata/raw' ], hooks: { // 'nikita:registry:normalize': (action) -> @@ -36,7 +36,7 @@ module.exports = { }; }, 'nikita:result': { - before: '@nikitajs/core/lib/plugins/history', + before: '@nikitajs/core/plugins/history', handler: function({action, error, output}) { // Honors the disabled plugin, status is `false` // when the 
action is disabled diff --git a/packages/core/lib/plugins/pubsub/engines/memory.js b/packages/core/lib/plugins/pubsub/engines/memory.js index b991b40e0..89786abcb 100644 --- a/packages/core/lib/plugins/pubsub/engines/memory.js +++ b/packages/core/lib/plugins/pubsub/engines/memory.js @@ -3,7 +3,7 @@ Default in-memory engine implementation. */ // Plugin -module.exports = function() { +export default function() { const store = {}; return { set: function(key, value) { diff --git a/packages/core/lib/plugins/pubsub/index.js b/packages/core/lib/plugins/pubsub/index.js index 2b927a29a..300cbc19c 100644 --- a/packages/core/lib/plugins/pubsub/index.js +++ b/packages/core/lib/plugins/pubsub/index.js @@ -1,14 +1,14 @@ /* -Plugin `@nikitajs/core/lib/plugins/pubsub` +Plugin `@nikitajs/core/plugins/pubsub` Provide a mechanism for actions to wait for a key to be published before continuing their execution. */ -module.exports = { - name: '@nikitajs/core/lib/plugins/pubsub', - require: '@nikitajs/core/lib/plugins/tools/find', +export default { + name: '@nikitajs/core/plugins/pubsub', + require: '@nikitajs/core/plugins/tools/find', hooks: { 'nikita:action': async function(action) { const engine = await action.tools.find( diff --git a/packages/core/lib/plugins/ssh.js b/packages/core/lib/plugins/ssh.js index 31c2098d4..36abae80c 100644 --- a/packages/core/lib/plugins/ssh.js +++ b/packages/core/lib/plugins/ssh.js @@ -1,48 +1,45 @@ - /* -# Plugin @nikitajs/core/lib/plugins/ssh +# Plugin @nikitajs/core/plugins/ssh Pass an SSH connection to an action. The connection is accessible with the `action.ssh` property. */ // Denpendencies -const {merge} = require('mixme'); -const utils = require('../utils'); -const session = require('../session'); +import utils from "@nikitajs/core/utils"; +import {with_options as session} from "@nikitajs/core/session"; // Nikita plugins -const events = require('./tools/events'); -const find = require('./tools/find'); -const log = require('./tools/log'); -const status = require('./output/status'); -const raw = require('./metadata/raw'); -const history = require('./history'); +import events from "@nikitajs/core/plugins/tools/events"; +import find from "@nikitajs/core/plugins/tools/find"; +import log from "@nikitajs/core/plugins/tools/log"; +import status from "@nikitajs/core/plugins/output/status"; +import raw from "@nikitajs/core/plugins/metadata/raw"; +import history from "@nikitajs/core/plugins/history"; // Plugin -module.exports = { - name: '@nikitajs/core/lib/plugins/ssh', - require: [ - '@nikitajs/core/lib/plugins/tools/find' - ], +export default { + name: "@nikitajs/core/plugins/ssh", + require: ["@nikitajs/core/plugins/tools/find"], hooks: { - 'nikita:action': async function(action) { + "nikita:action": async function (action) { // Is there a connection to open if (action.ssh && !utils.ssh.is(action.ssh)) { - let {ssh} = await session.with_options([{}], { - plugins: [ - events, find, log, - status, raw, history, - ] // Need to inject `tools.log` - }).ssh.open(action.ssh); + let { ssh } = await session([{}], { + plugins: [events, find, log, status, raw, history], // Need to inject `tools.log` + }) + .ssh.open(action.ssh); action.metadata.ssh_dispose = true; action.ssh = ssh; return; } // Find SSH connection in parent actions - let ssh = await action.tools.find( (action) => action.ssh ); + let ssh = await action.tools.find((action) => action.ssh); if (ssh) { if (!utils.ssh.is(ssh)) { - throw utils.error('NIKITA_SSH_INVALID_STATE', ['the `ssh` property is not a connection', `got 
${JSON.stringify(ssh)}`]); + throw utils.error("NIKITA_SSH_INVALID_STATE", [ + "the `ssh` property is not a connection", + `got ${JSON.stringify(ssh)}`, + ]); } action.ssh = ssh; return; @@ -52,12 +49,16 @@ module.exports = { } return; } else if (ssh !== void 0) { - throw utils.error('NIKITA_SSH_INVALID_VALUE', ['when disabled, the `ssh` property must be `null` or `false`,', 'when enable, the `ssh` property must be a connection or a configuration object', `got ${JSON.stringify(ssh)}`]); + throw utils.error("NIKITA_SSH_INVALID_VALUE", [ + "when disabled, the `ssh` property must be `null` or `false`,", + "when enable, the `ssh` property must be a connection or a configuration object", + `got ${JSON.stringify(ssh)}`, + ]); } // Find SSH open in previous siblings for (let i = 0; i < action.siblings.length; i++) { const sibling = action.siblings[i]; - if (sibling.metadata.module !== '@nikitajs/core/lib/actions/ssh/open') { + if (sibling.metadata.module !== "@nikitajs/core/actions/ssh/open") { continue; } if (sibling.output.ssh) { @@ -70,14 +71,14 @@ module.exports = { action.ssh = ssh; } }, - 'nikita:result': async function({action}) { + "nikita:result": async function ({ action }) { if (action.metadata.ssh_dispose) { - return (await session.with_options([{}], { - plugins: [require('./tools/events'), require('./tools/find'), require('./tools/log'), require('./output/status'), require('./metadata/raw'), require('./history')] // Need to inject `tools.log` + return await session([{}], { + plugins: [events, history, find, log, raw, status], // Need to inject `tools.log` }).ssh.close({ - ssh: action.ssh - })); + ssh: action.ssh, + }); } - } - } + }, + }, }; diff --git a/packages/core/lib/plugins/templated.js b/packages/core/lib/plugins/templated.js index c66502bd0..8b2ae67d2 100644 --- a/packages/core/lib/plugins/templated.js +++ b/packages/core/lib/plugins/templated.js @@ -1,22 +1,21 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/templated` +# Plugin `@nikitajs/core/plugins/templated` The templated plugin transform any string pass as an argument, for example a metadata or a configuration property, into a template. */ -const selfTemplated = require('self-templated'); +import selfTemplated from "self-templated"; -module.exports = { - name: '@nikitajs/core/lib/plugins/templated', +export default { + name: "@nikitajs/core/plugins/templated", hooks: { - 'nikita:action': { + "nikita:action": { // Note, conditions plugins define templated as a dependency - before: ['@nikitajs/core/lib/plugins/metadata/schema'], - handler: async function(action) { - const templated = await action.tools.find((action) => - action.metadata.templated + before: ["@nikitajs/core/plugins/metadata/schema"], + handler: async function (action) { + const templated = await action.tools.find( + (action) => action.metadata.templated ); if (templated !== true) { return; @@ -29,10 +28,10 @@ module.exports = { assertions: true, conditions: true, config: true, - metadata: true - } + metadata: true, + }, }); - } - } - } + }, + }, + }, }; diff --git a/packages/core/lib/plugins/tools/dig.js b/packages/core/lib/plugins/tools/dig.js index 042fc407f..3b340e7f0 100644 --- a/packages/core/lib/plugins/tools/dig.js +++ b/packages/core/lib/plugins/tools/dig.js @@ -1,6 +1,6 @@ /* -Plugin `@nikitajs/core/lib/plugins/tools/dig` +Plugin `@nikitajs/core/plugins/tools/dig` The plugin export a `dig` function which is used to traverse all the executed action prior to the current action. @@ -12,8 +12,8 @@ action. 
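The connection lifecycle handled above (open in a throwaway session, flag `ssh_dispose`, close on result) mirrors what a user can also write explicitly with the registered `ssh.open`/`ssh.close` actions. An explicit sketch, assuming the usual `host`/`port`/`username` options plus whatever authentication settings the target requires:

```js
import nikita from "@nikitajs/core";

await nikita(async function () {
  // Opens the connection; the following sibling actions reuse it.
  await this.ssh.open({
    host: "server.example.com",
    port: 22,
    username: "deploy",
  });
  await this.execute("hostname -f");
  // Close the connection once the work is done.
  await this.ssh.close();
});
```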
*/ -const each = require('each'); -const utils = require('../../utils'); +import each from 'each'; +import utils from '@nikitajs/core/utils'; const dig_down = async function(action, digger) { const results = []; @@ -84,8 +84,8 @@ const validate = function(action, args) { return [action, finder]; }; -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/dig', +export default { + name: '@nikitajs/core/plugins/tools/dig', hooks: { 'nikita:action': function(action) { // Register function diff --git a/packages/core/lib/plugins/tools/events.js b/packages/core/lib/plugins/tools/events.js index 22468b1fd..f3ba190fc 100644 --- a/packages/core/lib/plugins/tools/events.js +++ b/packages/core/lib/plugins/tools/events.js @@ -1,16 +1,15 @@ - /* -# Plugin `@nikitajs/core/lib/plugins/tools/event` +# Plugin `@nikitajs/core/plugins/tools/event` Expose the event object which implement the Node.js EventEmitter API. The event object is inhereted from parent actions and cascaded to children. */ -const {EventEmitter} = require('events'); +import { EventEmitter } from "events"; -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/events', +export default { + name: "@nikitajs/core/plugins/tools/events", hooks: { - 'nikita:normalize': function(action) { + "nikita:normalize": function (action) { if (action.tools == null) { action.tools = {}; } @@ -18,39 +17,39 @@ module.exports = { ? action.parent.tools.events : new EventEmitter(); }, - 'nikita:action': function(action) { - action.tools.events.emit('nikita:action:start', { - action: action + "nikita:action": function (action) { + action.tools.events.emit("nikita:action:start", { + action: action, }); }, - 'nikita:result': { - after: '@nikitajs/core/lib/plugins/output/status', - handler: function({action, error, output}, handler) { - return async function({action}) { + "nikita:result": { + after: "@nikitajs/core/plugins/output/status", + handler: function ({ output }, handler) { + return async function ({ action }) { try { output = await handler.apply(null, arguments); - action.tools.events.emit('nikita:action:end', { + action.tools.events.emit("nikita:action:end", { action: action, error: undefined, - output: output + output: output, }); return output; } catch (error) { - action.tools.events.emit('nikita:action:end', { + action.tools.events.emit("nikita:action:end", { action: action, error: error, - output: undefined + output: undefined, }); throw error; } }; - } + }, + }, + "nikita:resolved": function ({ action }) { + action.tools.events.emit("nikita:resolved", ...arguments); }, - 'nikita:resolved': function({action}) { - action.tools.events.emit('nikita:resolved', ...arguments); + "nikita:rejected": function ({ action }) { + action.tools.events.emit("nikita:rejected", ...arguments); }, - 'nikita:rejected': function({action}) { - action.tools.events.emit('nikita:rejected', ...arguments); - } - } + }, }; diff --git a/packages/core/lib/plugins/tools/find.js b/packages/core/lib/plugins/tools/find.js index 282906ddc..b8cb4ed2e 100644 --- a/packages/core/lib/plugins/tools/find.js +++ b/packages/core/lib/plugins/tools/find.js @@ -1,13 +1,13 @@ /* -# Plugin `@nikitajs/core/lib/plugins/tools/find` +# Plugin `@nikitajs/core/plugins/tools/find` Traverse the parent hierarchy until it find a value. The traversal will only stop if the user function return anything else than `undefined`, including `null` or `false`. 
*/ -const utils = require('../../utils'); +import utils from '@nikitajs/core/utils'; const find = async function(action, finder) { const precious = await finder(action, finder); @@ -53,8 +53,8 @@ const validate = function(action, args) { return [action, finder]; }; -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/find', +export default { + name: '@nikitajs/core/plugins/tools/find', hooks: { 'nikita:normalize': function(action, handler) { return async function() { diff --git a/packages/core/lib/plugins/tools/log.js b/packages/core/lib/plugins/tools/log.js index 0261fe313..516ecc38a 100644 --- a/packages/core/lib/plugins/tools/log.js +++ b/packages/core/lib/plugins/tools/log.js @@ -1,6 +1,6 @@ /* -# Plugin `@nikitajs/core/lib/plugins/tools/log` +# Plugin `@nikitajs/core/plugins/tools/log` The `log` plugin inject a log function into the action.handler argument. @@ -10,17 +10,16 @@ time the `log` function is called with the `log`, `config` and `metadata` argume */ -const {EventEmitter} = require('events'); -const stackTrace = require('stack-trace'); -const path = require('path'); -const {is_object_literal, merge, mutate} = require('mixme'); -const utils = require('../../utils'); +import path from 'node:path'; +import stackTrace from 'stack-trace'; +import {is_object_literal, mutate} from 'mixme'; +import utils from '@nikitajs/core/utils'; -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/log', +export default { + name: '@nikitajs/core/plugins/tools/log', require: [ - '@nikitajs/core/lib/plugins/tools/events', - '@nikitajs/core/lib/plugins/tools/find' + '@nikitajs/core/plugins/tools/events', + '@nikitajs/core/plugins/tools/find' ], hooks: { 'nikita:normalize': function(action) { @@ -29,7 +28,7 @@ module.exports = { } }, 'nikita:action': { - after: ['@nikitajs/core/lib/plugins/tools/events', '@nikitajs/core/lib/plugins/metadata/debug'], + after: ['@nikitajs/core/plugins/tools/events', '@nikitajs/core/plugins/metadata/debug'], handler: async function(action) { const debug = await action.tools.find(function(action) { return action.metadata.debug; diff --git a/packages/core/lib/plugins/tools/path.js b/packages/core/lib/plugins/tools/path.js index f66603ae8..9f9063a50 100644 --- a/packages/core/lib/plugins/tools/path.js +++ b/packages/core/lib/plugins/tools/path.js @@ -1,28 +1,35 @@ - /* -Plugin `@nikitajs/core/lib/plugins/tools/path` +Plugin `@nikitajs/core/plugins/tools/path` */ -const os = require('os'); -const path = require('path'); -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/path', +// Dependencies +import os from "os"; +import path from "path"; + +// Plugin +export default { + name: "@nikitajs/core/plugins/tools/path", hooks: { - 'nikita:action': { - after: '@nikitajs/core/lib/plugins/ssh', - handler: function(action) { + "nikita:action": { + after: "@nikitajs/core/plugins/ssh", + handler: function (action) { if (action.tools == null) { action.tools = {}; } // Path is alwaws posix over ssh // otherwise it is platform dependent - action.tools.path = !action.ssh ? os.platform === 'win32' ? path.win32 : path.posix : path.posix; + action.tools.path = !action.ssh + ? os.platform === "win32" + ? path.win32 + : path.posix + : path.posix; // Local is agnostic of ssh - action.tools.path.local = os.platform === 'win32' ? path.win32 : path.posix; + action.tools.path.local = + os.platform === "win32" ? 
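A short sketch of how `tools.find` is consumed, in the same spirit as the sudo lookup in the execute plugin earlier in this patch. It assumes action handlers receive the action object and can therefore reach `tools`:

```js
import nikita from "@nikitajs/core";

await nikita({ $sudo: true }, async function () {
  await this.call(async function ({ tools }) {
    // Traverse parents until the finder returns anything but `undefined`.
    const sudo = await tools.find(({ metadata }) => metadata.sudo);
    console.info("inherited sudo:", sudo === true);
  });
});
```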
path.win32 : path.posix; // Reinject posix and win32 path for conveniency action.tools.path.posix = path.posix; - return action.tools.path.win32 = path.win32; - } - } - } + return (action.tools.path.win32 = path.win32); + }, + }, + }, }; diff --git a/packages/core/lib/plugins/tools/schema.js b/packages/core/lib/plugins/tools/schema.js index 3d537cf0d..457acee55 100644 --- a/packages/core/lib/plugins/tools/schema.js +++ b/packages/core/lib/plugins/tools/schema.js @@ -5,15 +5,14 @@ schema. Thus, it mst be defined after every module which modify the config object. */ -const stream = require('stream'); -const util = require('util'); -const dedent = require('dedent'); -const {merge, mutate} = require('mixme'); -const Ajv = require('ajv').default; -const ajv_keywords = require('ajv-keywords'); -const ajv_formats = require("ajv-formats"); -const utils = require('../../utils'); -const instanceofDef = require('ajv-keywords/dist/definitions/instanceof'); +import stream from 'node:stream'; +import dedent from 'dedent'; +import {merge, mutate} from 'mixme'; +import Ajv from 'ajv'; +import ajv_keywords from 'ajv-keywords'; +import ajv_formats from "ajv-formats"; +import utils from '@nikitajs/core/utils'; +import instanceofDef from 'ajv-keywords/dist/definitions/instanceof.js'; instanceofDef.CONSTRUCTORS['Error'] = Error; instanceofDef.CONSTRUCTORS['stream.Writable'] = stream.Writable; @@ -30,8 +29,8 @@ const parse = function(uri) { }; }; -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/schema', +export default { + name: '@nikitajs/core/plugins/tools/schema', hooks: { 'nikita:normalize': { handler: async function(action) { @@ -65,14 +64,15 @@ module.exports = { switch (protocol) { case 'module:': try { - const act = await require.main.require(pathname); + const act = (await import(pathname)).default; return accept({ definitions: act.metadata.definitions }); } catch (error) { return reject(utils.error('NIKITA_SCHEMA_INVALID_MODULE', [ 'the module location is not resolvable,', - `module name is ${JSON.stringify(pathname)}.` + `module name is ${JSON.stringify(pathname)},`, + `error message is ${JSON.stringify(error.message)}.` ])); } break; @@ -229,7 +229,7 @@ module.exports = { action.metadata.namespace.length ? `action \`${action.metadata.namespace.join('.')}\`` : "root action", - action.metadata.namespace.join('.') === 'call' && action.metadata.module !== '@nikitajs/core/lib/actions/call' + action.metadata.namespace.join('.') === 'call' && action.metadata.module !== '@nikitajs/core/actions/call' ? ` in module ${action.metadata.module}` : undefined, ', ', @@ -249,7 +249,7 @@ module.exports = { action.metadata.namespace.length ? `action \`${action.metadata.namespace.join('.')}\`` : "root action", - action.metadata.namespace.join('.') === 'call' && action.metadata.module !== '@nikitajs/core/lib/actions/call' + action.metadata.namespace.join('.') === 'call' && action.metadata.module !== '@nikitajs/core/actions/call' ? 
` in module ${action.metadata.module}` : undefined, ':', diff --git a/packages/core/lib/plugins/tools/walk.js b/packages/core/lib/plugins/tools/walk.js index ab4432c3d..e874b42dd 100644 --- a/packages/core/lib/plugins/tools/walk.js +++ b/packages/core/lib/plugins/tools/walk.js @@ -1,11 +1,10 @@ // Dependencies -const utils = require('../../utils'); +import utils from '@nikitajs/core/utils'; const walk = async function(action, walker) { - var precious, results; - precious = (await walker(action)); - results = []; + const precious = await walker(action); + const results = []; if (precious !== void 0) { results.push(precious); } @@ -48,8 +47,8 @@ const validate = function(action, args) { return [action, walker]; }; -module.exports = { - name: '@nikitajs/core/lib/plugins/tools/walk', +export default { + name: '@nikitajs/core/plugins/tools/walk', hooks: { 'nikita:normalize': function(action) { if (action.tools == null) { diff --git a/packages/core/lib/register.js b/packages/core/lib/register.js index 2ffc32cdb..54eb94d48 100644 --- a/packages/core/lib/register.js +++ b/packages/core/lib/register.js @@ -1,52 +1,52 @@ // Dependencies -const registry = require('./registry'); +import registry from '@nikitajs/core/registry'; // Action registration -module.exports = { +const actions = { '': { handler: (function() {}) }, - 'assert': '@nikitajs/core/lib/actions/assert', + 'assert': '@nikitajs/core/actions/assert', 'call': { - '': '@nikitajs/core/lib/actions/call' + '': '@nikitajs/core/actions/call' }, 'execute': { - '': '@nikitajs/core/lib/actions/execute', - 'assert': '@nikitajs/core/lib/actions/execute/assert', - 'wait': '@nikitajs/core/lib/actions/execute/wait' + '': '@nikitajs/core/actions/execute', + 'assert': '@nikitajs/core/actions/execute/assert', + 'wait': '@nikitajs/core/actions/execute/wait' }, 'fs': { 'base': { - 'chmod': '@nikitajs/core/lib/actions/fs/base/chmod', - 'chown': '@nikitajs/core/lib/actions/fs/base/chown', - 'copy': '@nikitajs/core/lib/actions/fs/base/copy', - 'createReadStream': '@nikitajs/core/lib/actions/fs/base/createReadStream', - 'createWriteStream': '@nikitajs/core/lib/actions/fs/base/createWriteStream', - 'exists': '@nikitajs/core/lib/actions/fs/base/exists', - 'lstat': '@nikitajs/core/lib/actions/fs/base/lstat', - 'mkdir': '@nikitajs/core/lib/actions/fs/base/mkdir', - 'readdir': '@nikitajs/core/lib/actions/fs/base/readdir', - 'readFile': '@nikitajs/core/lib/actions/fs/base/readFile', - 'readlink': '@nikitajs/core/lib/actions/fs/base/readlink', - 'rename': '@nikitajs/core/lib/actions/fs/base/rename', - 'rmdir': '@nikitajs/core/lib/actions/fs/base/rmdir', - 'stat': '@nikitajs/core/lib/actions/fs/base/stat', - 'symlink': '@nikitajs/core/lib/actions/fs/base/symlink', - 'unlink': '@nikitajs/core/lib/actions/fs/base/unlink', - 'writeFile': '@nikitajs/core/lib/actions/fs/base/writeFile' + 'chmod': '@nikitajs/core/actions/fs/base/chmod', + 'chown': '@nikitajs/core/actions/fs/base/chown', + 'copy': '@nikitajs/core/actions/fs/base/copy', + 'createReadStream': '@nikitajs/core/actions/fs/base/createReadStream', + 'createWriteStream': '@nikitajs/core/actions/fs/base/createWriteStream', + 'exists': '@nikitajs/core/actions/fs/base/exists', + 'lstat': '@nikitajs/core/actions/fs/base/lstat', + 'mkdir': '@nikitajs/core/actions/fs/base/mkdir', + 'readdir': '@nikitajs/core/actions/fs/base/readdir', + 'readFile': '@nikitajs/core/actions/fs/base/readFile', + 'readlink': '@nikitajs/core/actions/fs/base/readlink', + 'rename': '@nikitajs/core/actions/fs/base/rename', + 'rmdir': 
'@nikitajs/core/actions/fs/base/rmdir', + 'stat': '@nikitajs/core/actions/fs/base/stat', + 'symlink': '@nikitajs/core/actions/fs/base/symlink', + 'unlink': '@nikitajs/core/actions/fs/base/unlink', + 'writeFile': '@nikitajs/core/actions/fs/base/writeFile' }, - 'assert': '@nikitajs/core/lib/actions/fs/assert', - 'chmod': '@nikitajs/core/lib/actions/fs/chmod', - 'chown': '@nikitajs/core/lib/actions/fs/chown', - 'copy': '@nikitajs/core/lib/actions/fs/copy', - 'glob': '@nikitajs/core/lib/actions/fs/glob', - 'hash': '@nikitajs/core/lib/actions/fs/hash', - 'link': '@nikitajs/core/lib/actions/fs/link', - 'mkdir': '@nikitajs/core/lib/actions/fs/mkdir', - 'move': '@nikitajs/core/lib/actions/fs/move', - 'remove': '@nikitajs/core/lib/actions/fs/remove', - 'wait': '@nikitajs/core/lib/actions/fs/wait' + 'assert': '@nikitajs/core/actions/fs/assert', + 'chmod': '@nikitajs/core/actions/fs/chmod', + 'chown': '@nikitajs/core/actions/fs/chown', + 'copy': '@nikitajs/core/actions/fs/copy', + 'glob': '@nikitajs/core/actions/fs/glob', + 'hash': '@nikitajs/core/actions/fs/hash', + 'link': '@nikitajs/core/actions/fs/link', + 'mkdir': '@nikitajs/core/actions/fs/mkdir', + 'move': '@nikitajs/core/actions/fs/move', + 'remove': '@nikitajs/core/actions/fs/remove', + 'wait': '@nikitajs/core/actions/fs/wait' }, 'registry': { 'get': { @@ -62,7 +62,7 @@ module.exports = { raw: true }, handler: function({parent, args: [namespace, action]}) { - return parent.registry.register(namespace, action); + parent.registry.register(namespace, action); } }, 'registered': { @@ -78,23 +78,16 @@ module.exports = { raw: true }, handler: function({parent, args: [namespace]}) { - return parent.registry.unregister(namespace); + parent.registry.unregister(namespace); } } }, 'ssh': { - 'open': '@nikitajs/core/lib/actions/ssh/open', - 'close': '@nikitajs/core/lib/actions/ssh/close', - 'root': '@nikitajs/core/lib/actions/ssh/root' + 'open': '@nikitajs/core/actions/ssh/open', + 'close': '@nikitajs/core/actions/ssh/close', + 'root': '@nikitajs/core/actions/ssh/root' }, - 'wait': '@nikitajs/core/lib/actions/wait' + 'wait': '@nikitajs/core/actions/wait' }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/core/lib/registry.js b/packages/core/lib/registry.js index 9b9e38c85..16ee9608c 100644 --- a/packages/core/lib/registry.js +++ b/packages/core/lib/registry.js @@ -7,8 +7,8 @@ Management facility to register and unregister actions. 
*/ // Dependencies -const {is_object, merge, mutate} = require('mixme'); -const normalize = require('./session/normalize'); +import {is_object, merge, mutate} from 'mixme'; +import normalize from '@nikitajs/core/session/normalize'; // Register all functions const create = function({chain, on_register, parent, plugins} = {}) { @@ -55,7 +55,7 @@ const create = function({chain, on_register, parent, plugins} = {}) { if (typeof module !== 'string') { throw Error(`Invalid Argument: module must be a string, got ${module.toString()}`); } - const action = await require.main.require(module); + const action = (await import(module)).default; if (typeof action === 'function') { action = { handler: action @@ -161,7 +161,7 @@ const create = function({chain, on_register, parent, plugins} = {}) { let child_store = store; const namespaceTemp = namespace.concat(['']); for (let i = 0; i < namespaceTemp.length; i++) { - n = namespaceTemp[i]; + const n = namespaceTemp[i]; if (!child_store[n]) { break; } @@ -191,12 +191,12 @@ const create = function({chain, on_register, parent, plugins} = {}) { return (await plugins.call({ name: 'nikita:registry:normalize', args: action, - handler: function(action) { - return normalize(action); + handler: async function(action) { + return await normalize(action); } })); } else { - return normalize(action); + return await normalize(action); } }; /* @@ -429,4 +429,4 @@ const create = function({chain, on_register, parent, plugins} = {}) { return obj; }; -module.exports = create(); +export default create(); diff --git a/packages/core/lib/schedulers/index.js b/packages/core/lib/schedulers/index.js index afdc8c58a..bfbe28405 100644 --- a/packages/core/lib/schedulers/index.js +++ b/packages/core/lib/schedulers/index.js @@ -1,7 +1,3 @@ -// Generated by CoffeeScript 2.7.0 -var utils; - -utils = require('../utils'); /* Usage: @@ -15,7 +11,7 @@ Options: Prevent the execution of newly registered tasks, call resume to trigger the execution. 
*/ -module.exports = function(handlers, options) { +export default function(handlers, options) { var opts, promise, scheduler, stack, state; if (Array.isArray(handlers) || (handlers != null) === false) { if (options == null) { diff --git a/packages/core/lib/session.js b/packages/core/lib/session.js index eae8ba6ac..585d5cc4f 100644 --- a/packages/core/lib/session.js +++ b/packages/core/lib/session.js @@ -1,18 +1,18 @@ -const {merge} = require('mixme'); -const each = require('each'); -const registry = require('./registry'); -const {plugandplay} = require('plug-and-play'); -const contextualize = require('./session/contextualize'); -const normalize = require('./session/normalize'); -const utils = require('./utils'); +import {merge} from 'mixme'; +import each from 'each'; +import {plugandplay} from 'plug-and-play'; +import registry from '@nikitajs/core/registry'; +import contextualize from '@nikitajs/core/session/contextualize'; +import normalize from '@nikitajs/core/session/normalize'; +import utils from '@nikitajs/core/utils'; const session = function(args, options = {}) { // Catch calls to new actions let namespace = []; const on_call = function(...args) { let nm; - // Extract action namespace and reset the state + // Extract action namespace and reset its value [namespace, nm] = [[], namespace]; // Schedule the action and get the result as a promise const prom = action.scheduler.call(async function() { @@ -21,9 +21,7 @@ const session = function(args, options = {}) { if (!child) { return Promise.reject(utils.error('ACTION_UNREGISTERED_NAMESPACE', ['no action is registered under this namespace,', `got ${JSON.stringify(nm)}.`])); } - const args_is_array = args.some(function(arg) { - return Array.isArray(arg); - }); + const args_is_array = args.some( (arg) => Array.isArray(arg) ); if (!args_is_array || child.metadata?.raw_input) { return session(args, { namespace: nm, @@ -146,11 +144,15 @@ const session = function(args, options = {}) { } // Load action from registry if (action.metadata.namespace) { - const action_from_registry = (await action.registry.get(action.metadata.namespace)); - // Merge the registry action with the user action properties - for (const k in action_from_registry) { - const v = action_from_registry[k]; - action[k] = merge(action_from_registry[k], action[k]); + try{ + const action_from_registry = await action.registry.get(action.metadata.namespace); + // Merge the registry action with the user action properties + for (const k in action_from_registry) { + const v = action_from_registry[k]; + action[k] = merge(action_from_registry[k], action[k]); + } + }catch(err){ + return reject(err); } } // Switch the scheduler to register actions inside the handler @@ -230,10 +232,10 @@ const session = function(args, options = {}) { }); }; -module.exports = function(...args) { +export default function(...args) { return session(args); }; -module.exports.with_options = function(args, options) { +export const with_options = function(args, options) { return session(args, options); }; diff --git a/packages/core/lib/session/contextualize.js b/packages/core/lib/session/contextualize.js index dc6f246e4..e95e89261 100644 --- a/packages/core/lib/session/contextualize.js +++ b/packages/core/lib/session/contextualize.js @@ -1,73 +1,85 @@ -// Generated by CoffeeScript 2.7.0 -var indexOf = [].indexOf; - -const {mutate, is_object_literal} = require('mixme'); -const utils = require('../utils'); +import { mutate, is_object_literal } from "mixme"; +import utils from "@nikitajs/core/utils"; const properties 
= [ - 'context', 'handler', 'hooks', 'metadata', - 'config', 'parent', 'plugins', 'registry', - 'run', 'scheduler', 'ssh', 'state' + "context", + "handler", + "hooks", + "metadata", + "config", + "parent", + "plugins", + "registry", + "run", + "scheduler", + "ssh", + "state", ]; -module.exports = function(args) { +export default function (args) { // Reconstituate the action const default_action = () => ({ config: {}, metadata: {}, hooks: {}, - state: {} + state: {}, }); const new_action = default_action(); for (const arg of args) { switch (typeof arg) { - case 'function': + case "function": if (new_action.handler) { - throw utils.error('NIKITA_SESSION_INVALID_ARGUMENTS', [`handler is already registered, got ${utils.error.got(arg)}`]); + throw utils.error("NIKITA_SESSION_INVALID_ARGUMENTS", [ + `handler is already registered, got ${utils.error.got(arg)}`, + ]); } mutate(new_action, { - handler: arg + handler: arg, }); break; - case 'string': + case "string": if (new_action.handler) { - throw utils.error('NIKITA_SESSION_INVALID_ARGUMENTS', [`handler is already registered, got ${JSON.stringigy(arg)}`]); + throw utils.error("NIKITA_SESSION_INVALID_ARGUMENTS", [ + `handler is already registered, got ${JSON.stringigy(arg)}`, + ]); } mutate(new_action, { metadata: { - argument: arg - } + argument: arg, + }, }); break; - case 'object': + case "object": if (Array.isArray(arg)) { - throw utils.error('NIKITA_SESSION_INVALID_ARGUMENTS', [`argument cannot be an array, got ${utils.error.got(arg)}`]); + throw utils.error("NIKITA_SESSION_INVALID_ARGUMENTS", [ + `argument cannot be an array, got ${utils.error.got(arg)}`, + ]); } if (arg === null) { mutate(new_action, { metadata: { - argument: null - } + argument: null, + }, }); } else if (is_object_literal(arg)) { for (const k in arg) { const v = arg[k]; - if (k === '$') { + if (k === "$") { // mutate new_action, v for (const kk in v) { const vv = v[kk]; - if (['config', 'metadata'].includes(kk)) { - new_action[kk] = {...new_action[kk], ...vv}; + if (["config", "metadata"].includes(kk)) { + new_action[kk] = { ...new_action[kk], ...vv }; } else { new_action[kk] = vv; } } - } else if (k[0] === '$') { - if (k === '$$') { + } else if (k[0] === "$") { + if (k === "$$") { mutate(new_action.metadata, v); } else { - prop = k.substr(1); - if (indexOf.call(properties, prop) >= 0) { + const prop = k.substr(1); + if (properties.includes(prop)) { new_action[prop] = v; } else { new_action.metadata[prop] = v; @@ -82,20 +94,20 @@ module.exports = function(args) { } else { mutate(new_action, { metadata: { - argument: arg - } + argument: arg, + }, }); } break; default: mutate(new_action, { metadata: { - argument: arg - } + argument: arg, + }, }); } } // Create empty action when no arguments are provided and not for an empty array // new_actions = default_action() if not args.length return new_action; -}; +} diff --git a/packages/core/lib/session/normalize.js b/packages/core/lib/session/normalize.js index ca74bbd60..9d5e63ac6 100644 --- a/packages/core/lib/session/normalize.js +++ b/packages/core/lib/session/normalize.js @@ -1,13 +1,8 @@ -const properties = [ - 'context', 'handler', 'hooks', 'metadata', 'config', - 'parent', 'plugins', 'registry', 'run', 'scheduler', 'state' -]; - -module.exports = function(action) { +export default function(action) { if (Array.isArray(action)) { - return action.map(function(action) { - return module.exports(action); + return action.map(async function(action) { + return (await import(action)).default; }); } return action; diff --git 
a/packages/core/lib/utils/array.js b/packages/core/lib/utils/array.js index 25e9df52a..682420ac2 100644 --- a/packages/core/lib/utils/array.js +++ b/packages/core/lib/utils/array.js @@ -1,147 +1,133 @@ -const test = function(arr, depth = -1) { - var i, k, ref, ret; - ret = []; - for (i = k = 0, ref = arr.length; (0 <= ref ? k < ref : k > ref); i = 0 <= ref ? ++k : --k) { - if (Array.isArray(arr[i])) { - if (depth === 0) { - ret.push(...arr[i]); - } else { - ret.push(...test(arr[i], depth - 1)); - } - } else { - ret.push(arr[i]); - } +// const compare = (array1, array2) -> +// # Compare lengths and save some time +// if array1.length isnt array2.length +// return false +// for i in [0...array1.length] +// # Check if we have nested arrays +// if Array.isArray(array1[i]) and Array.isArray(array2[i]) +// # recurse into the nested arrays +// if !array1[i].equals array2[i] +// return false +// else if array1[i] != array2[i] +// # Warning - two different object instances will never be equal: {x:20} != {x:20} +// return false +// return true + +const clone = function (arr) { + var el, i, k, len, ret; + ret = [arr.length]; + for (i = k = 0, len = arr.length; k < len; i = ++k) { + el = arr[i]; + ret[i] = el; } return ret; }; -module.exports = { - // compare: (array1, array2) -> - // # compare lengths - can save a lot of time - // if array1.length isnt array2.length - // return false - // for i in [0...array1.length] - // # Check if we have nested arrays - // if Array.isArray(array1[i]) and Array.isArray(array2[i]) - // # recurse into the nested arrays - // if !array1[i].equals array2[i] - // return false - // else if array1[i] != array2[i] - // # Warning - two different object instances will never be equal: {x:20} != {x:20} - // return false - // return true - clone: function(arr) { - var el, i, k, len, ret; - ret = [arr.length]; - for (i = k = 0, len = arr.length; k < len; i = ++k) { - el = arr[i]; - ret[i] = el; - } - return ret; - }, - intersect: function(array) { - var argument, i, item, j, k, l, len, len1, result; - if (array === null) { - return []; + +const intersect = function (array) { + var argument, i, item, j, k, l, len, len1, result; + if (array === null) { + return []; + } + result = []; + for (i = k = 0, len = array.length; k < len; i = ++k) { + item = array[i]; + if (result.indexOf(item) !== -1) { + continue; } - result = []; - for (i = k = 0, len = array.length; k < len; i = ++k) { - item = array[i]; - if (result.indexOf(item) !== -1) { - continue; - } - for (j = l = 0, len1 = arguments.length; l < len1; j = ++l) { - argument = arguments[j]; - if (argument.indexOf(item) === -1) { - break; - } - } - if (j === arguments.length) { - result.push(item); + for (j = l = 0, len1 = arguments.length; l < len1; j = ++l) { + argument = arguments[j]; + if (argument.indexOf(item) === -1) { + break; } } - return result; - }, - flatten: function(arr, depth=Infinity) { - if(depth === -1){ depth = Infinity} - return arr.flat(depth) - }, - // flatten: function(arr, depth = -1) { - // var i, k, ref, ret; - // ret = []; - // for (i = k = 0, ref = arr.length; (0 <= ref ? k < ref : k > ref); i = 0 <= ref ? 
++k : --k) { - // if (Array.isArray(arr[i])) { - // if (depth === 0) { - // ret.push(...arr[i]); - // } else { - // ret.push(...module.exports.flatten(arr[i], depth - 1)); - // } - // } else { - // ret.push(arr[i]); - // } - // } - // return ret; - // }, - multiply: function(...args) { - // Convert every argument to an array - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - if (!Array.isArray(arg)) { - args[i] = [arg]; - } + if (j === arguments.length) { + result.push(item); + } + } + return result; +}; + +const flatten = function (arr, depth = Infinity) { + return arr.flat(depth === -1 ? Infinity : depth); +}; + +const multiply = function (...args) { + // Convert every argument to an array + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + if (!Array.isArray(arg)) { + args[i] = [arg]; } - // Multiply arguments - let results = []; - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - const newresults = (function() { - const results1 = []; - for (let j = 0; j < arg.length; j++) { - const arg_element = arg[j]; - // Every element of the first argument will initialize results - if (i === 0) { - results1.push([[arg_element]]); - } else { - results1.push((function() { + } + // Multiply arguments + let results = []; + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + const newresults = (function () { + const results1 = []; + for (let j = 0; j < arg.length; j++) { + const arg_element = arg[j]; + // Every element of the first argument will initialize results + if (i === 0) { + results1.push([[arg_element]]); + } else { + results1.push( + (function () { const results2 = []; for (let i = 0; i < results.length; i++) { const action = results[i]; results2.push([...action, arg_element]); } return results2; - })()); - } + })() + ); } - return results1; - })(); - results = newresults.flat(1); - } - return results; - }, - merge: function(...arrays) { - const r = []; - for (const array of arrays) { - for (const el of array) { - r.push(el); } + return results1; + })(); + results = newresults.flat(1); + } + return results; +}; + +const merge = function (...arrays) { + const r = []; + for (const array of arrays) { + for (const el of array) { + r.push(el); } - return r; - }, - shuffle: function(a) { - if (a.length <= 1) { - return a; - } - for (i = a.length - 1; i > 0; i--) { - const j = Math.floor(Math.random() * (i + 1)); - [a[i], a[j]] = [a[j], a[i]]; - } + } + return r; +}; + +const shuffle = function (a) { + if (a.length <= 1) { return a; - }, - unique: function(array) { - const obj = {}; - for (let el of array) { - obj[el] = true; - } - return Object.keys(obj); } + for (let i = a.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + [a[i], a[j]] = [a[j], a[i]]; + } + return a; +}; + +const unique = function (array) { + const obj = {}; + for (const el of array) { + obj[el] = true; + } + return Object.keys(obj); +}; + +export { clone, intersect, flatten, multiply, merge, shuffle, unique }; + +export default { + clone: clone, + intersect: intersect, + flatten: flatten, + multiply: multiply, + merge: merge, + shuffle: shuffle, + unique: unique, }; diff --git a/packages/core/lib/utils/buffer.js b/packages/core/lib/utils/buffer.js index 580f2063f..1204ee356 100644 --- a/packages/core/lib/utils/buffer.js +++ b/packages/core/lib/utils/buffer.js @@ -1,9 +1,13 @@ -module.exports = { - // Treat the buffer as a string and trim whitespace characters. 
- // Quick and dirt way to trim, alternative is to loop forward+backwark, - // detect all whitespace charactes, get a start and end and finaly resize the buffer. - trim: function(buf, encoding) { - return Buffer.from(buf.toString(encoding).trim()); - } +// Treat the buffer as a string and trim whitespace characters. +// Quick and dirt way to trim, alternative is to loop forward+backwark, +// detect all whitespace charactes, get a start and end and finaly resize the buffer. +const trim = function(buf, encoding) { + return Buffer.from(buf.toString(encoding).trim()); +} + +export { trim }; + +export default { + trim: trim }; diff --git a/packages/core/lib/utils/error.js b/packages/core/lib/utils/error.js index ae162c01b..57fec7a0c 100644 --- a/packages/core/lib/utils/error.js +++ b/packages/core/lib/utils/error.js @@ -1,10 +1,11 @@ - const NikitaError = class NikitaError extends Error { constructor(code, message, ...contexts) { if (Array.isArray(message)) { - message = message.filter(function(line) { - return !!line; - }).join(' '); + message = message + .filter(function (line) { + return !!line; + }) + .join(" "); } message = `${code}: ${message}`; super(message); @@ -14,13 +15,14 @@ const NikitaError = class NikitaError extends Error { this.code = code; for (const context of contexts) { for (const key in context) { - if (key === 'code') { + if (key === "code") { continue; } const value = context[key]; - if (value === undefined) { continue; } - this[key] = - Buffer.isBuffer(value) + if (value === undefined) { + continue; + } + this[key] = Buffer.isBuffer(value) ? value.toString() : value === null ? value @@ -28,36 +30,42 @@ const NikitaError = class NikitaError extends Error { } } } - }; -module.exports = function() { +const error = function () { return new NikitaError(...arguments); }; -module.exports.got = function(value, {depth = 0, max_depth = 3} = {}) { +const got = function (value, { depth = 0, max_depth = 3 } = {}) { switch (typeof value) { - case 'function': - return 'function'; - case 'object': + case "function": + return "function"; + case "object": if (Array.isArray(value)) { const out = []; for (const el of value) { if (depth === max_depth) { - out.push('\u2026'); + out.push("\u2026"); } else { - out.push(module.exports.got(el, { - depth: depth + 1, - max_depth: max_depth - })); + out.push( + got(el, { + depth: depth + 1, + max_depth: max_depth, + }) + ); } } - return `[${out.join(',')}]`; + return `[${out.join(",")}]`; } else { return JSON.stringify(value); } - break; default: return JSON.stringify(value); } }; + +error.got = got; + +export { got }; + +export default error; diff --git a/packages/core/lib/utils/index.js b/packages/core/lib/utils/index.js index 0105b3329..d329ae999 100644 --- a/packages/core/lib/utils/index.js +++ b/packages/core/lib/utils/index.js @@ -1,17 +1,48 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = { - array: require('./array'), - buffer: require('./buffer'), - error: require('./error'), - mode: require('./mode'), - object: require('./object'), - os: require('./os'), - promise: require('./promise'), - regexp: require('./regexp'), - schema: require('./schema'), - semver: require('./semver'), - ssh: require('./ssh'), - stats: require('./stats'), - string: require('./string'), - tilde: require('./tilde') +import array from "@nikitajs/core/utils/array"; +import buffer from "@nikitajs/core/utils/buffer"; +import error from "@nikitajs/core/utils/error"; +import mode from "@nikitajs/core/utils/mode"; +import object from "@nikitajs/core/utils/object"; 
+import os from "@nikitajs/core/utils/os"; +import promise from "@nikitajs/core/utils/promise"; +import regexp from "@nikitajs/core/utils/regexp"; +import schema from "@nikitajs/core/utils/schema"; +import semver from "@nikitajs/core/utils/semver"; +import ssh from "@nikitajs/core/utils/ssh"; +import stats from "@nikitajs/core/utils/stats"; +import string from "@nikitajs/core/utils/string"; +import tilde from "@nikitajs/core/utils/tilde"; + +export { + array, + buffer, + error, + mode, + object, + os, + promise, + regexp, + schema, + semver, + ssh, + stats, + string, + tilde, +}; + +export default { + array: array, + buffer: buffer, + error: error, + mode: mode, + object: object, + os: os, + promise: promise, + regexp: regexp, + schema: schema, + semver: semver, + ssh: ssh, + stats: stats, + string: string, + tilde: tilde, }; diff --git a/packages/core/lib/utils/mode.js b/packages/core/lib/utils/mode.js index 9019d60e1..a205804e4 100644 --- a/packages/core/lib/utils/mode.js +++ b/packages/core/lib/utils/mode.js @@ -1,34 +1,37 @@ -// Generated by CoffeeScript 2.7.0 -module.exports = { - stringify: function(mode) { - if (typeof mode === 'number') { - return mode.toString(8); - } else { - return mode; - } - }, - /* - Compare multiple mode. All arguments modes must match. If first mode is any array, then - other arguments mode must much at least one element of the array. - */ - compare: function(...modes) { - let ref = modes[0]; - if (ref == null) { - throw Error(`Invalid mode: ${ref}`); - } - if (!Array.isArray(ref)) { - ref = [ref]; - } - ref = ref.map((mode) => this.stringify(mode) ); - for (let i = 1; i < modes.length; i++) { - const mode = this.stringify(modes[i]); - if (!ref.some(function(m) { +/* +Compare multiple mode. All arguments modes must match. If first mode is any array, then +other arguments mode must much at least one element of the array. +*/ +const compare = function (...modes) { + let ref = modes[0]; + if (ref == null) { + throw Error(`Invalid mode: ${ref}`); + } + if (!Array.isArray(ref)) { + ref = [ref]; + } + ref = ref.map((mode) => this.stringify(mode)); + for (let i = 1; i < modes.length; i++) { + const mode = this.stringify(modes[i]); + if ( + !ref.some(function (m) { const l = Math.min(m.length, mode.length); return m.substr(-l) === mode.substr(-l); - })) { - return false; - } + }) + ) { + return false; } - return true; } + return true; +}; + +const stringify = function (mode) { + return typeof mode === "number" ? 
mode.toString(8) : mode; +}; + +export { compare, stringify }; + +export default { + compare: compare, + stringify: stringify, }; diff --git a/packages/core/lib/utils/object.js b/packages/core/lib/utils/object.js index bcbc7cda8..f6477db14 100644 --- a/packages/core/lib/utils/object.js +++ b/packages/core/lib/utils/object.js @@ -1,170 +1,192 @@ +import array from "@nikitajs/core/utils/array"; +import regexp from "@nikitajs/core/utils/regexp"; +import { snake_case as snake_case_str } from "@nikitajs/core/utils/string"; +import { is_object_literal } from "mixme"; -const array = require('./array'); -const regexp = require('./regexp'); -const {snake_case} = require('./string'); -const {is_object_literal} = require('mixme'); +const clean = function (content, undefinedOnly) { + for (const k in content) { + const v = content[k]; + if (v && typeof v === "object") { + clean(v, undefinedOnly); + continue; + } + if (typeof v === "undefined") { + delete content[k]; + } + if (!undefinedOnly && v === null) { + delete content[k]; + } + } + return content; +}; -module.exports = { - clean: function(content, undefinedOnly) { - for (const k in content) { - const v = content[k]; - if (v && typeof v === 'object') { - module.exports.clean(v, undefinedOnly); - continue; - } - if (typeof v === 'undefined') { - delete content[k]; - } - if (!undefinedOnly && v === null) { - delete content[k]; - } +const copy = function (source, properties) { + const obj = {}; + for (const property of properties) { + if (source[property] !== undefined) { + obj[property] = source[property]; } - return content; - }, - copy: function(source, properties) { - const obj = {}; - for (const property of properties) { - if (source[property] !== undefined) { - obj[property] = source[property]; - } + } + return obj; +}; + +const diff = function (obj1, obj2, keys) { + if (!keys) { + const keys1 = Object.keys(obj1); + const keys2 = Object.keys(obj2); + keys = array.merge(keys1, keys2, array.unique(keys1)); + } + const diff = {}; + for (const k in obj1) { + const v = obj1[k]; + if (!(keys.indexOf(k) >= 0)) { + continue; } - return obj; - }, - diff: function(obj1, obj2, keys) { - if (!keys) { - const keys1 = Object.keys(obj1); - const keys2 = Object.keys(obj2); - keys = array.merge(keys1, keys2, array.unique(keys1)); - } - const diff = {}; - for (const k in obj1) { - const v = obj1[k]; - if (!(keys.indexOf(k) >= 0)) { - continue; - } - if (obj2[k] === v) { - continue; - } + if (obj2[k] === v) { + continue; + } + diff[k] = []; + diff[k][0] = v; + } + for (const k in obj2) { + const v = obj2[k]; + if (!(keys.indexOf(k) >= 0)) { + continue; + } + if (obj1[k] === v) { + continue; + } + if (diff[k] == null) { diff[k] = []; - diff[k][0] = v; } - for (const k in obj2) { - const v = obj2[k]; - if (!(keys.indexOf(k) >= 0)) { - continue; - } - if (obj1[k] === v) { - continue; - } - if (diff[k] == null) { - diff[k] = []; - } - diff[k][1] = v; - } - return diff; - }, - // equals: (obj1, obj2, keys) -> - // keys1 = Object.keys obj1 - // keys2 = Object.keys obj2 - // if keys - // keys1 = keys1.filter (k) -> keys.indexOf(k) isnt -1 - // keys2 = keys2.filter (k) -> keys.indexOf(k) isnt -1 - // else keys = keys1 - // return false if keys1.length isnt keys2.length - // for k in keys - // return false if obj1[k] isnt obj2[k] - // return true - insert: function(source, keys, value) { - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - source = source[key]; - if (source === undefined) { - source = source[key] = {}; - } - if (!is_object_literal(source)) { 
- throw error('NIKITA_UTILS_INSERT', [`Invalid source at path ${keys.slice(0, i)},`, 'it must be an object or undefined,', `got ${JSON.stringify(source)}`]); - } - if (i === keys.length(-1)) { - source[key] = merge(source[key], value); - return; - } + diff[k][1] = v; + } + return diff; +}; + +// equals: (obj1, obj2, keys) -> +// keys1 = Object.keys obj1 +// keys2 = Object.keys obj2 +// if keys +// keys1 = keys1.filter (k) -> keys.indexOf(k) isnt -1 +// keys2 = keys2.filter (k) -> keys.indexOf(k) isnt -1 +// else keys = keys1 +// return false if keys1.length isnt keys2.length +// for k in keys +// return false if obj1[k] isnt obj2[k] +// return true + +const insert = function (source, keys, value) { + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + source = source[key]; + if (source === undefined) { + source = source[key] = {}; } - }, - match: function(source, target) { - if (is_object_literal(target)) { - if (!is_object_literal(source)) { - return false; - } - for (const k in target) { - const v = target[k]; - if (!module.exports.match(source[k], v)) { - return false; - } - } - return true; - } else if (Array.isArray(target)) { - if (!Array.isArray(source)) { - return false; - } - if (target.length !== source.length) { + if (!is_object_literal(source)) { + throw error("NIKITA_UTILS_INSERT", [ + `Invalid source at path ${keys.slice(0, i)},`, + "it must be an object or undefined,", + `got ${JSON.stringify(source)}`, + ]); + } + if (i === keys.length(-1)) { + source[key] = merge(source[key], value); + return; + } + } +}; + +const match = function (source, target) { + if (is_object_literal(target)) { + if (!is_object_literal(source)) { + return false; + } + for (const k in target) { + const v = target[k]; + if (!match(source[k], v)) { return false; } - for (const i in target) { - const v = target[i]; - if (!module.exports.match(source[i], v)) { - return false; - } - } - return true; - } else if (typeof source === 'string') { - if (regexp.is(target)) { - return target.test(source); - } else if (Buffer.isBuffer(target)) { - return target.equals(Buffer.from(source)); - } else { - return source === target; - } - } else if (Buffer.isBuffer(source)) { - if (Buffer.isBuffer(target)) { - return source.equals(target); - } else if (typeof target === 'string') { - return source.equals(Buffer.from(target)); - } else { + } + return true; + } else if (Array.isArray(target)) { + if (!Array.isArray(source)) { + return false; + } + if (target.length !== source.length) { + return false; + } + for (const i in target) { + const v = target[i]; + if (!match(source[i], v)) { return false; } + } + return true; + } else if (typeof source === "string") { + if (regexp.is(target)) { + return target.test(source); + } else if (Buffer.isBuffer(target)) { + return target.equals(Buffer.from(source)); } else { return source === target; } - }, - filter: function(source, black, white) { - if (black == null) { - black = []; - } - const obj = {}; - // If white list, only use the selected list - // Otherwise clone it all - for (const key of (white != null ? 
white : Object.keys(source))) { - if (source.hasOwnProperty(key) && !black.includes(key)) { - // unless part of black list - obj[key] = source[key]; - } + } else if (Buffer.isBuffer(source)) { + if (Buffer.isBuffer(target)) { + return source.equals(target); + } else if (typeof target === "string") { + return source.equals(Buffer.from(target)); + } else { + return false; + } + } else { + return source === target; + } +}; + +const filter = function (source, black, white) { + if (black == null) { + black = []; + } + const obj = {}; + // If white list, only use the selected list + // Otherwise clone it all + for (const key of white != null ? white : Object.keys(source)) { + if (source.hasOwnProperty(key) && !black.includes(key)) { + // unless part of black list + obj[key] = source[key]; } - return obj; - }, - snake_case: function(source) { - const obj = {}; - for (const key in source) { - const value = source[key]; - obj[snake_case(key)] = value; - } - return obj; - }, - trim: function(obj) { - const result = {}; - for (const k in obj) { - const v = obj[k] - result[k.trim()] = typeof v === "string" ? v.trim() : v; - } - return result; } + return obj; +}; + +const snake_case = function (source) { + const obj = {}; + for (const key in source) { + const value = source[key]; + obj[snake_case_str(key)] = value; + } + return obj; +}; + +const trim = function (obj) { + const result = {}; + for (const k in obj) { + const v = obj[k]; + result[k.trim()] = typeof v === "string" ? v.trim() : v; + } + return result; +}; + +export { clean, copy, diff, insert, match, filter, snake_case, trim }; + +export default { + clean: clean, + copy: copy, + diff: diff, + insert: insert, + match: match, + filter: filter, + snake_case: snake_case, + trim: trim, }; diff --git a/packages/core/lib/utils/os.js b/packages/core/lib/utils/os.js index f1875866e..d825d1245 100644 --- a/packages/core/lib/utils/os.js +++ b/packages/core/lib/utils/os.js @@ -1,92 +1,99 @@ +import path from "path"; +import dedent from "dedent"; -const dedent = require('dedent'); -const path = require('path'); +/* +Shell command to print archictecture, OS name, version release and linux +version release to stdout. -module.exports = { - /* - Shell command to print archictecture, OS name, version release and linux - version release to stdout. +The `arch` property is obtained from `uname -m`. Note, on Apple M1, `uname -p` +return `arm` when `uname -m` return `arm64`. See the [`uname` possible +values](https://en.wikipedia.org/wiki/Uname#Examples). - The `arch` property is obtained from `uname -m`. Note, on Apple M1, `uname -p` - return `arm` when `uname -m` return `arm64`. See the [`uname` possible - values](https://en.wikipedia.org/wiki/Uname#Examples). +The `name` property return one of 'rhel', 'centos', 'ubuntu', 'debian' or +'arch'. Other distributions are not yet implemented. - The `name` property return one of 'rhel', 'centos', 'ubuntu', 'debian' or - 'arch'. Other distributions are not yet implemented. +TODO: +- we shall implement a property `distrib` to distinguish `rhel` from `centos` +- name shall return an array of possible match, eg CentOS 6 matching + `centos6`, `centos`, `rhel`, `linux`. This way, the `if_os` condition will + match against multiple keywords. - TODO: - - we shall implement a property `distrib` to distinguish `rhel` from `centos` - - name shall return an array of possible match, eg CentOS 6 matching - `centos6`, `centos`, `rhel`, `linux`. This way, the `if_os` condition will - match against multiple keywords. 
+The following distributions are supported and tested: +* RHEL 6, CentOS 6 +* RHEL 7, CentOS 7 +* Ubuntu/Debian +* Arch Linux +* TODO: support RHEL 8, CentOS 8 + */ +const command = dedent` +#ARCH=$(uname -m | sed 's/x86_//;s/i[3-6]86/32/') +ARCH=$(uname -m) +#LINUX_VERSION=\`uname -r | sed 's/\\(.*\\)-.*/\\1/'\` +LINUX_VERSION=\`uname -r\` +# RHEL 7 (CentOS 7), Ubuntu/Debian, Arch Linux +if [ -f /etc/os-release ]; then + DISTRIB=\`cat /etc/os-release | egrep '^ID=' | sed 's/^\\(ID="\\?\\)\\?\\([A-Za-z]*\\).*/\\2/'\` +# RHEL 6 (CentOS 6) +elif [ -f /etc/redhat-release ]; then + DISTRIB=\`cat /etc/redhat-release | sed 's/^\\(Red \\)\\?\\([A-Za-z]*\\).*/\\1\\2/' | tr '[:upper:]' '[:lower:]'\` + if [ $DISTRIB == 'red hat' ]; then + DISTRIB='rhel' + fi +else + exit 2 +fi +case $DISTRIB in + # RHEL and CentOS + rhel|centos|rocky|alma) + # \`cat /etc/redhat-release\` prints for: + # - CentOS 6: 'CentOS release 6.10 (Final)' + # - CentOS 7: 'CentOS Linux release 7.9.2009 (Core)' + # - RHEL 6: 'Red Hat Enterprise Linux Server release 6.4 (Santiago)' + # - RHEL 7: 'Red Hat Enterprise Linux Server release 7.0 (Maipo)' + # Note, "alma" is not tested + VERSION=\`cat /etc/redhat-release | sed 's/.* \\([0-9]\\)\\(\\(\\.*[0-9]\\)*\\) .*/\\1\\2/'\` + ;; + # Ubuntu + ubuntu) + . /etc/lsb-release + VERSION=$DISTRIB_RELEASE + ;; + # Debian + debian) + VERSION=\`cat /etc/debian_version\` + ;; + # Arch Linux + arch) + VERSION='' + ;; + *) + echo -n $DISTRIB + exit 2 +esac +echo -n "$ARCH|$DISTRIB|$VERSION|$LINUX_VERSION" +`; - The following distributions are supported and tested: - * RHEL 6, CentOS 6 - * RHEL 7, CentOS 7 - * Ubuntu/Debian - * Arch Linux - * TODO: support RHEL 8, CentOS 8 - */ - command: dedent` - #ARCH=$(uname -m | sed 's/x86_//;s/i[3-6]86/32/') - ARCH=$(uname -m) - #LINUX_VERSION=\`uname -r | sed 's/\\(.*\\)-.*/\\1/'\` - LINUX_VERSION=\`uname -r\` - # RHEL 7 (CentOS 7), Ubuntu/Debian, Arch Linux - if [ -f /etc/os-release ]; then - DISTRIB=\`cat /etc/os-release | egrep '^ID=' | sed 's/^\\(ID="\\?\\)\\?\\([A-Za-z]*\\).*/\\2/'\` - # RHEL 6 (CentOS 6) - elif [ -f /etc/redhat-release ]; then - DISTRIB=\`cat /etc/redhat-release | sed 's/^\\(Red \\)\\?\\([A-Za-z]*\\).*/\\1\\2/' | tr '[:upper:]' '[:lower:]'\` - if [ $DISTRIB == 'red hat' ]; then - DISTRIB='rhel' - fi - else - exit 2 - fi - case $DISTRIB in - # RHEL and CentOS - rhel|centos) - # \`cat /etc/redhat-release\` prints for: - # - CentOS 6: 'CentOS release 6.10 (Final)' - # - CentOS 7: 'CentOS Linux release 7.9.2009 (Core)' - # - RHEL 6: 'Red Hat Enterprise Linux Server release 6.4 (Santiago)' - # - RHEL 7: 'Red Hat Enterprise Linux Server release 7.0 (Maipo)' - VERSION=\`cat /etc/redhat-release | sed 's/.* \\([0-9]\\)\\(\\(\\.*[0-9]\\)*\\) .*/\\1\\2/'\` - ;; - # Ubuntu - ubuntu) - . 
/etc/lsb-release - VERSION=$DISTRIB_RELEASE - ;; - # Debian - debian) - VERSION=\`cat /etc/debian_version\` - ;; - # Arch Linux - arch) - VERSION='' - ;; - *) - exit 2 - esac - echo -n "$ARCH|$DISTRIB|$VERSION|$LINUX_VERSION" - `, - whoami: function({ssh, platform = process.platform} = {}) { - if (ssh) { - return ssh.config.username; - } - if (/^win/.test(platform)) { - return process.env['USERPROFILE'].split(path.win32.sep)[2]; - } - if (process.env['USER']) { - return process.env['USER']; - } - if (/^\/root$/.test(process.env['HOME'])) { - return process.env['HOME'].split('/')[1]; - } - if (/^\/home\/[^\/]+$/.test(process.env['HOME'])) { - return process.env['HOME'].split('/')[2]; - } +const whoami = function ({ ssh, platform = process.platform } = {}) { + if (ssh) { + return ssh.config.username; } + if (/^win/.test(platform)) { + return process.env["USERPROFILE"].split(path.win32.sep)[2]; + } + if (process.env["USER"]) { + return process.env["USER"]; + } + if (/^\/root$/.test(process.env["HOME"])) { + return process.env["HOME"].split("/")[1]; + } + if (/^\/home\/[^\/]+$/.test(process.env["HOME"])) { + return process.env["HOME"].split("/")[2]; + } +}; + +export { command, whoami }; + +export default { + command: command, + whoami: whoami, }; diff --git a/packages/core/lib/utils/promise.js b/packages/core/lib/utils/promise.js index 86bdfe5b3..865c9880a 100644 --- a/packages/core/lib/utils/promise.js +++ b/packages/core/lib/utils/promise.js @@ -1,25 +1,21 @@ +import util from "util"; -const util = require('util') +const array_filter = async function (arr, handler) { + const fail = Symbol(); + return ( + await Promise.all( + arr.map(async (item) => ((await handler(item)) ? item : fail)) + ) + ).filter((i) => i !== fail); +}; + +const is = function (obj) { + return util.types.isPromise(obj); +}; + +export { array_filter, is }; -module.exports = { - array_filter: async function(arr, handler) { - const fail = Symbol(); - return ( - await Promise.all( - arr.map(async function(item) { - if (await handler(item)) { - return item; - } else { - return fail; - } - }) - ) - ).filter(function(i) { - return i !== fail; - }); - }, - is: function(obj) { - // return !!obj && (typeof obj === 'object' || typeof obj === 'function') && typeof obj.then === 'function'; - return util.types.isPromise(obj) - } +export default { + array_filter: array_filter, + is: is, }; diff --git a/packages/core/lib/utils/regexp.js b/packages/core/lib/utils/regexp.js index 618afc10b..01d9187e2 100644 --- a/packages/core/lib/utils/regexp.js +++ b/packages/core/lib/utils/regexp.js @@ -1,14 +1,20 @@ -const quote = require('regexp-quote'); - -module.exports = { - // Escape RegExp related charracteres - // eg `///^\*/\w+@#{misc.regexp.escape realm}\s+\*///mg` - escape: function(str) { - return str.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); - }, - is: function(reg) { - return reg instanceof RegExp; - }, +import quote from 'regexp-quote'; + +// Escape RegExp related charracteres +// eg `///^\*/\w+@#{misc.regexp.escape realm}\s+\*///mg` +const escape = function(str) { + return str.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +}; + +const is = function(reg) { + return reg instanceof RegExp; +}; + +export { escape, is, quote }; + +export default { + escape: escape, + is: is, quote: quote }; diff --git a/packages/core/lib/utils/schema.js b/packages/core/lib/utils/schema.js index 97d511619..4a8dd7788 100644 --- a/packages/core/lib/utils/schema.js +++ b/packages/core/lib/utils/schema.js @@ -1,13 +1,16 @@ +const encodings = [ + "ascii", // 
For 7-bit ASCII data only. This encoding is fast and will strip the high bit if set. + "base64", // Base64 encoding. When creating a Buffer from a string, this encoding will also correctly accept "URL and Filename Safe Alphabet" as specified in RFC 4648, Section 5. + "binary", // Alias for 'latin1'. + "hex", // Encode each byte as two hexadecimal characters. + "latin1", // A way of encoding the Buffer into a one-byte encoded string (as defined by the IANA in RFC 1345, page 63, to be the Latin-1 supplement block and C0/C1 control codes). + "ucs2", // Alias of 'utf16le'. + "utf8", // Multibyte encoded Unicode characters. Many web pages and other document formats use UTF-8. + "utf16le", // 2 or 4 bytes, little-endian encoded Unicode characters. Surrogate pairs (U+10000 to U+10FFFF) are supported. +]; -module.exports = { - encodings: [ - 'ascii', // For 7-bit ASCII data only. This encoding is fast and will strip the high bit if set. - 'base64', // Base64 encoding. When creating a Buffer from a string, this encoding will also correctly accept "URL and Filename Safe Alphabet" as specified in RFC 4648, Section 5. - 'binary', // Alias for 'latin1'. - 'hex', // Encode each byte as two hexadecimal characters. - 'latin1', // A way of encoding the Buffer into a one-byte encoded string (as defined by the IANA in RFC 1345, page 63, to be the Latin-1 supplement block and C0/C1 control codes). - 'ucs2', // Alias of 'utf16le'. - 'utf8', // Multibyte encoded Unicode characters. Many web pages and other document formats use UTF-8. - 'utf16le' // 2 or 4 bytes, little-endian encoded Unicode characters. Surrogate pairs (U+10000 to U+10FFFF) are supported. - ] +export { encodings }; + +export default { + encodings: encodings, }; diff --git a/packages/core/lib/utils/semver.js b/packages/core/lib/utils/semver.js index 9ce4a3c10..960bec120 100644 --- a/packages/core/lib/utils/semver.js +++ b/packages/core/lib/utils/semver.js @@ -1,37 +1,40 @@ +import semver from "semver"; -const crypto = require('crypto'); - -module.exports = { - sanitize: function(versions, fill = 'x') { - const is_array = Array.isArray(versions); - if (!is_array) { - versions = [versions]; +const sanitize = function (versions, fill = "x") { + const is_array = Array.isArray(versions); + if (!is_array) { + versions = [versions]; + } + for (let i = 0; i < versions.length; i++) { + let version = versions[i]; + version = version.split("."); + version = version.slice(0, 3); + while (version.length < 3) { + version.push(fill); } - for (let i = 0; i < versions.length; i++) { - let version = versions[i]; - version = version.split('.'); - version = version.slice(0, 3); - while (version.length < 3) { - version.push(fill) + version = version.map((v) => { + if (!isNaN(parseInt(v, 10))) { + // Ubuntu style, remove trailing '0' + return `${parseInt(v, 10)}`; } - version = version.map(function(v) { - if (!isNaN(parseInt(v, 10))) { - // Ubuntu style, remove trailing '0' - return `${parseInt(v, 10)}`; - } - if (/\d+-\d+/.test(v)) { - // Arch style, strip /-\d$/ - v = v.split('-')[0]; - } - return v; - }); - versions[i] = version.join('.'); - } - if (is_array) { - return versions; - } else { - return versions[0]; - } - }, - satisfies: require('semver').satisfies + if (/\d+-\d+/.test(v)) { + // Arch style, strip /-\d$/ + v = v.split("-")[0]; + } + return v; + }); + versions[i] = version.join("."); + } + if (is_array) { + return versions; + } else { + return versions[0]; + } +}; + +export { sanitize }; + +export default { + sanitize: sanitize, + satisfies: 
semver.satisfies, }; diff --git a/packages/core/lib/utils/ssh.js b/packages/core/lib/utils/ssh.js index a37eedabb..73881e02e 100644 --- a/packages/core/lib/utils/ssh.js +++ b/packages/core/lib/utils/ssh.js @@ -1,32 +1,46 @@ +import { merge } from "mixme"; +import connect from "ssh2-connect"; +import { whoami } from "@nikitajs/core/utils/os"; +import { hash as hash_str } from "@nikitajs/core/utils/string"; -const {merge} = require('mixme'); -const {whoami} = require('./os'); -const connect = require('ssh2-connect'); -const {hash} = require('./string'); - -module.exports = ssh = { - compare: function(ssh1, ssh2) { - // Between 2 configurations - const compare_config = function(config1, config2) { - return config1 && config2 && config1.host === config2.host && (config1.port || 22) === (config2.port || 22) && config1.username === config2.username; - }; - if (!ssh1 && !ssh2 && !!ssh1 === !!ssh2) { // 2 null - return true; - } - const config1 = ssh.is(ssh1) ? ssh1.config : merge(ssh1); - const config2 = ssh.is(ssh2) ? ssh2.config : merge(ssh2); - if (config1.username == null) { - config1.username = whoami(); - } - if (config2.username == null) { - config2.username = whoami(); - } - return compare_config(config1, config2); - }, - is: function(ssh) { - return connect.is(ssh); - }, - hash: function(ssh) { - return hash(JSON.stringify(ssh.config)); +const compare = function (ssh1, ssh2) { + // Between 2 configurations + const compare_config = function (config1, config2) { + return ( + config1 && + config2 && + config1.host === config2.host && + (config1.port || 22) === (config2.port || 22) && + config1.username === config2.username + ); + }; + if (!ssh1 && !ssh2 && !!ssh1 === !!ssh2) { + // 2 null + return true; + } + const config1 = this.is(ssh1) ? ssh1.config : merge(ssh1); + const config2 = this.is(ssh2) ? 
ssh2.config : merge(ssh2); + if (config1.username == null) { + config1.username = whoami(); + } + if (config2.username == null) { + config2.username = whoami(); } + return compare_config(config1, config2); +}; + +const is = function (ssh) { + return connect.is(ssh); +}; + +const hash = function (ssh) { + return hash_str(JSON.stringify(ssh.config)); +}; + +export { compare, is, hash }; + +export default { + compare: compare, + is: is, + hash: hash, }; diff --git a/packages/core/lib/utils/stats.js b/packages/core/lib/utils/stats.js index 338d94507..351df01a1 100644 --- a/packages/core/lib/utils/stats.js +++ b/packages/core/lib/utils/stats.js @@ -1,48 +1,71 @@ -// Generated by CoffeeScript 2.7.0 -var constants; - -module.exports = { - isDirectory: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFDIR; - }, - isFile: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFREG; - }, - isBlockDevice: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFBLK; - }, - isCharacterDevice: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFCHR; - }, - isSymbolicLink: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFLNK; - }, - isFIFO: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFIFO; - }, - isSocket: function(mode) { - return (mode & constants.S_IFMT) === constants.S_IFSOCK; - }, - type: function(mode) { - if (this.isDirectory(mode)) { - return 'Directory'; - } else if (this.isFile(mode)) { - return 'File'; - } else if (this.isBlockDevice(mode)) { - return 'Block Device'; - } else if (this.isCharacterDevice(mode)) { - return 'Character Device'; - } else if (this.isSymbolicLink(mode)) { - return 'Symbolic Link'; - } else if (this.isFIFO(mode)) { - return 'FIFO'; - } else if (this.isSocket(mode)) { - return 'Socket'; - } else { - return 'Unknown'; - } +import { constants } from "fs"; + +const isDirectory = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFDIR; +}; + +const isFile = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFREG; +}; + +const isBlockDevice = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFBLK; +}; + +const isCharacterDevice = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFCHR; +}; + +const isSymbolicLink = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFLNK; +}; + +const isFIFO = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFIFO; +}; + +const isSocket = function (mode) { + return (mode & constants.S_IFMT) === constants.S_IFSOCK; +}; + +const type = function (mode) { + if (this.isDirectory(mode)) { + return "Directory"; + } else if (this.isFile(mode)) { + return "File"; + } else if (this.isBlockDevice(mode)) { + return "Block Device"; + } else if (this.isCharacterDevice(mode)) { + return "Character Device"; + } else if (this.isSymbolicLink(mode)) { + return "Symbolic Link"; + } else if (this.isFIFO(mode)) { + return "FIFO"; + } else if (this.isSocket(mode)) { + return "Socket"; + } else { + return "Unknown"; } }; -//# Dependencies -constants = require('fs').constants; +export { + isDirectory, + isFile, + isBlockDevice, + isCharacterDevice, + isSymbolicLink, + isFIFO, + isSocket, + type, +}; + +export default { + isDirectory: isDirectory, + isFile: isFile, + isBlockDevice: isBlockDevice, + isCharacterDevice: isCharacterDevice, + isSymbolicLink: isSymbolicLink, + isFIFO: isFIFO, + isSocket: isSocket, + type: type, +}; diff --git 
a/packages/core/lib/utils/string.js b/packages/core/lib/utils/string.js index c2176e4cd..7d6771dae 100644 --- a/packages/core/lib/utils/string.js +++ b/packages/core/lib/utils/string.js @@ -1,101 +1,135 @@ -const crypto = require("crypto"); -const yaml = require("js-yaml"); -const error = require("./error"); +import crypto from "crypto"; +import { snake_case_str } from "mixme"; +import yaml from "js-yaml"; +import error from "@nikitajs/core/utils/error"; -module.exports = { - escapeshellarg: function (arg) { - const result = arg.replace(/'/g, (match) => "'\"'\"'"); - return `'${result}'`; - }, - /* - `string.hash(file, [algorithm], callback)` +/** + * Escape an argument with single quotes. + * + * @param {*} arg + * @returns Single quote escaped argument + */ +const escapeshellarg = function (arg) { + const result = arg.replace(/'/g, (match) => "'\"'\"'"); + return `'${result}'`; +}; - Output the hash of a supplied string in hexadecimal - form. The default algorithm to compute the hash is md5. - */ - hash: function (data, algorithm) { - if (arguments.length === 1) { - algorithm = "md5"; - } - return crypto.createHash(algorithm).update(data).digest("hex"); - }, - repeat: function (str, l) { - return Array(l + 1).join(str); - }, - /* - `string.endsWith(search, [position])` +/* +`string.endsWith(search, [position])` - Determines whether a string ends with the characters of another string, - returning true or false as appropriate. - This method has been added to the ECMAScript 6 specification and its code - was borrowed from [Mozilla](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith) - */ - endsWith: function (str, search, position) { - position = position || str.length; - position = position - search.length; - const lastIndex = str.lastIndexOf(search); - return lastIndex !== -1 && lastIndex === position; - }, - format: async function (data, format, args = {}) { - const esa = this.escapeshellarg; - const lines = this.lines; - if (typeof format === "function") { - try { - return await format({ - data: data, - ...args, - }); - } catch (err) { - throw error("NIKITA_UTILS_STRING_FORMAT_UDF_FAILURE", [ - "failed to format output with a user defined function,", - `original error message is ${esa(err.message)}.`, - ]); - } - } else { - try { - return (function () { - switch (format) { - case "json": - return JSON.parse(data); - case "jsonlines": - return lines(data.trim()).map(JSON.parse); - case "yaml": - return yaml.load(data); - } - })(); - } catch (err) { - throw error("NIKITA_UTILS_STRING_FORMAT_PARSING_FAILURE", [ - "failed to parse output,", - `format is ${JSON.stringify(format)},`, - `original error message is ${JSON.stringify(err.message)}.`, - ]); - } - } - }, - lines: function (str) { - return str.split(/\r\n|[\n\r\u0085\u2028\u2029]/g); - }, - max: function (str, max) { - if (str.length > max) { - return str.slice(0, max) + "…"; - } else { - return str; - } - }, - print_time: function (time) { - if (time > 1000 * 60) { - `${time / 1000}m`; +Determines whether a string ends with the characters of another string, +returning true or false as appropriate. 
+This method has been added to the ECMAScript 6 specification and its code +was borrowed from [Mozilla](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith) +*/ +const endsWith = function (str, search, position) { + position = position || str.length; + position = position - search.length; + const lastIndex = str.lastIndexOf(search); + return lastIndex !== -1 && lastIndex === position; +}; + +const format = async function (data, format, args = {}) { + const esa = this.escapeshellarg; + const lines = this.lines; + if (typeof format === "function") { + try { + return await format({ + data: data, + ...args, + }); + } catch (err) { + throw error("NIKITA_UTILS_STRING_FORMAT_UDF_FAILURE", [ + "failed to format output with a user defined function,", + `original error message is ${esa(err.message)}.`, + ]); } - if (time > 1000) { - return `${time / 1000}s`; - } else { - return `${time}ms`; + } else { + try { + return (function () { + switch (format) { + case "json": + return JSON.parse(data); + case "jsonlines": + return lines(data) + .filter((line) => line.trim() !== "") + .map(JSON.parse); + case "yaml": + return yaml.load(data); + } + })(); + } catch (err) { + throw error("NIKITA_UTILS_STRING_FORMAT_PARSING_FAILURE", [ + "failed to parse output,", + `format is ${JSON.stringify(format)},`, + `original error message is ${JSON.stringify(err.message)}.`, + ]); } - }, - snake_case: function (str) { - return str - .replace(/([a-z\d])([A-Z]+)/g, "$1_$2") - .replace(/[-\s]+/g, "_") - .toLowerCase(); - }, + } +}; + +/* +`string.hash(file, [algorithm], callback)` + +Output the hash of a supplied string in hexadecimal +form. The default algorithm to compute the hash is md5. +*/ +const hash = function (data, algorithm) { + if (arguments.length === 1) { + algorithm = "md5"; + } + return crypto.createHash(algorithm).update(data).digest("hex"); +}; + +const lines = function (str) { + return str.split(/\r\n|[\n\r\u0085\u2028\u2029]/g); +}; + +const max = function (str, max) { + if (str.length > max) { + return str.slice(0, max) + "…"; + } else { + return str; + } +}; + +const print_time = function (time) { + if (time > 1000 * 60) { + `${time / 1000}m`; + } + if (time > 1000) { + return `${time / 1000}s`; + } else { + return `${time}ms`; + } +}; + +const repeat = function (str, l) { + return Array(l + 1).join(str); +}; + +const snake_case = snake_case_str; + +export { + escapeshellarg, + endsWith, + format, + hash, + lines, + max, + print_time, + repeat, + snake_case, +}; + +export default { + escapeshellarg: escapeshellarg, + endsWith: endsWith, + format: format, + hash: hash, + lines: lines, + max: max, + print_time: print_time, + repeat: repeat, + snake_case: snake_case, }; diff --git a/packages/core/lib/utils/tilde.js b/packages/core/lib/utils/tilde.js index f3695a958..e168bef67 100644 --- a/packages/core/lib/utils/tilde.js +++ b/packages/core/lib/utils/tilde.js @@ -1,6 +1,5 @@ - -const tilde = require('tilde-expansion'); -const path = require('path'); +import tilde from "tilde-expansion"; +import path from "path"; /* Not, those function are not aware of an SSH connection @@ -8,17 +7,24 @@ and can't use `path.posix` when appropriate over SSH. It could be assumed that a path starting with `~` is always posix but this is not yet handled and tested. 
*/ -module.exports = { - normalize: function(location) { - return new Promise(function(accept, reject) { - return tilde(location, function(location) { - return accept(path.normalize(location)); - }); + +const normalize = function (location) { + return new Promise(function (accept, reject) { + return tilde(location, function (location) { + return accept(path.normalize(location)); }); - }, - resolve: async function(...locations) { - const normalized = locations.map(module.exports.normalize) - const paths = (await Promise.all(normalized)); - return path.resolve(...paths); - } + }); +}; + +const resolve = async function (...locations) { + const normalized = locations.map(normalize); + const paths = await Promise.all(normalized); + return path.resolve(...paths); +}; + +export { normalize, resolve }; + +export default { + normalize: normalize, + resolve: resolve, }; diff --git a/packages/core/package.json b/packages/core/package.json index c423447dc..e106645a2 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -76,7 +76,7 @@ "self-templated": "^0.2.3", "semver": "^7.3.7", "ssh2-connect": "^3.4.1", - "ssh2-exec": "^0.7.3", + "ssh2-exec": "^0.7.6", "ssh2-fs": "^1.1.2", "stack-trace": "^0.0.10", "tilde-expansion": "^0.0.0", @@ -96,6 +96,12 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + ".": "./lib/index.js", + "./actions/*": "./lib/actions/*/index.js", + "./utils": "./lib/utils/index.js", + "./*": "./lib/*.js" + }, "files": [ "/lib" ], @@ -104,17 +110,16 @@ }, "homepage": "https://nikita.js.org/", "license": "MIT", - "main": "./lib/index", "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register" - ], "inline-diffs": true, - "timeout": 30000, + "loader": "./test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "should" + ], + "throw-deprecation": true, + "timeout": 10000 }, "repository": { "type": "git", @@ -127,5 +132,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha --node-flags '--unhandled-rejections=strict' 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/core/test.sample.coffee b/packages/core/test.sample.coffee index 554f557d4..d096ee819 100644 --- a/packages/core/test.sample.coffee +++ b/packages/core/test.sample.coffee @@ -1,10 +1,10 @@ -module.exports = +export default tags: api: true conditions_if_os: false chown: false - ssh: false + ssh: true sudo: false posix: true system_execute_arc_chroot: false @@ -18,5 +18,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/core/test/actions/assert.coffee b/packages/core/test/actions/assert.coffee index 7877fd38d..2157a7839 100644 --- a/packages/core/test/actions/assert.coffee +++ b/packages/core/test/actions/assert.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -nikita = require '../../lib' -registry = require '../../lib/registry' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'actions.assert', -> - return unless tags.api + return unless test.tags.api describe 'returned value', -> diff --git a/packages/core/test/actions/call.coffee b/packages/core/test/actions/call.coffee index 6320af308..3360305d5 100644 --- 
a/packages/core/test/actions/call.coffee +++ b/packages/core/test/actions/call.coffee @@ -1,11 +1,12 @@ -nikita = require '../../lib' -registry = require '../../lib/registry' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.call', -> - return unless tags.api + return unless test.tags.api it 'call action from global registry', -> try diff --git a/packages/core/test/actions/execute/assert.coffee b/packages/core/test/actions/execute/assert.coffee index ba5174789..ad635be87 100644 --- a/packages/core/test/actions/execute/assert.coffee +++ b/packages/core/test/actions/execute/assert.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.assert', -> - return unless tags.posix + return unless test.tags.posix describe 'schema', -> diff --git a/packages/core/test/actions/execute/config.arch_linux.coffee b/packages/core/test/actions/execute/config.arch_linux.coffee index 4734e8a1e..16c419c7a 100644 --- a/packages/core/test/actions/execute/config.arch_linux.coffee +++ b/packages/core/test/actions/execute/config.arch_linux.coffee @@ -1,15 +1,16 @@ -nikita = require '../../../lib' -utils = require '../../../lib/utils' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.arch_linux', -> - return unless tags.system_execute_arc_chroot + return unless test.tags.system_execute_arc_chroot describe 'schema', -> - it 'arch_chroot require arch_chroot_rootdir', -> + it 'arch_chroot requires arch_chroot_rootdir', -> nikita.execute arch_chroot: true command: '' diff --git a/packages/core/test/actions/execute/config.bash.coffee b/packages/core/test/actions/execute/config.bash.coffee index e9b303ec7..ce7a15818 100644 --- a/packages/core/test/actions/execute/config.bash.coffee +++ b/packages/core/test/actions/execute/config.bash.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.bash', -> - return unless tags.posix + return unless test.tags.posix they 'in generated path', ({ssh}) -> nikita diff --git a/packages/core/test/actions/execute/config.code.coffee b/packages/core/test/actions/execute/config.code.coffee index 3c887aba8..f3f59e18a 100644 --- a/packages/core/test/actions/execute/config.code.coffee +++ b/packages/core/test/actions/execute/config.code.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.code', -> - return unless tags.posix + return unless test.tags.posix describe 'normalization', -> 
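Aside from swapping `require` for `import`, the converted test files above load the package through bare specifiers (`@nikitajs/core`, `@nikitajs/core/registry`, `@nikitajs/core/utils`) instead of relative `../../lib` paths. Those specifiers only resolve because of the `exports` map and `"type": "module"` entries added to packages/core/package.json earlier in this patch. The lines below are an illustrative sketch of that resolution, not part of the patch itself:

// Illustrative only: how the new "exports" map in packages/core/package.json
// resolves the bare specifiers used by the converted tests.
import nikita from "@nikitajs/core";            // "."       -> ./lib/index.js
import utils from "@nikitajs/core/utils";       // "./utils" -> ./lib/utils/index.js
import registry from "@nikitajs/core/registry"; // "./*"     -> ./lib/registry.js

Actions are reachable the same way through the "./actions/*" pattern, which maps onto ./lib/actions/*/index.js.
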
diff --git a/packages/core/test/actions/execute/config.cwd.coffee b/packages/core/test/actions/execute/config.cwd.coffee index 28d3ec91f..77bd952f3 100644 --- a/packages/core/test/actions/execute/config.cwd.coffee +++ b/packages/core/test/actions/execute/config.cwd.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.cwd', -> - return unless tags.system_execute_arc_chroot + return unless test.tags.system_execute_arc_chroot they 'execute in the context of directory', ({ssh}) -> nikita diff --git a/packages/core/test/actions/execute/config.env.coffee b/packages/core/test/actions/execute/config.env.coffee index 8cdcab65f..c8901c980 100644 --- a/packages/core/test/actions/execute/config.env.coffee +++ b/packages/core/test/actions/execute/config.env.coffee @@ -1,11 +1,12 @@ -stream = require 'stream' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import stream from 'node:stream' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.env', -> - return unless tags.posix + return unless test.tags.posix they 'invalid schema', ({ssh}) -> nikita $ssh: ssh, -> diff --git a/packages/core/test/actions/execute/config.env_export.coffee b/packages/core/test/actions/execute/config.env_export.coffee index a8d2be4f7..b1f03aace 100644 --- a/packages/core/test/actions/execute/config.env_export.coffee +++ b/packages/core/test/actions/execute/config.env_export.coffee @@ -1,11 +1,12 @@ -stream = require 'stream' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import stream from 'node:stream' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.env_export', -> - return unless tags.posix + return unless test.tags.posix they 'env in execute action', ({ssh}) -> nikita $ssh: ssh, -> diff --git a/packages/core/test/actions/execute/config.format.coffee b/packages/core/test/actions/execute/config.format.coffee index 6479b025e..65a912512 100644 --- a/packages/core/test/actions/execute/config.format.coffee +++ b/packages/core/test/actions/execute/config.format.coffee @@ -1,12 +1,13 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.format', -> - return unless tags.posix + return unless test.tags.posix - describe 'function', -> + describe 'udf', -> they 'return user value', ({ssh}) -> nikita @@ -30,7 +31,7 @@ describe 'actions.execute.config.format', -> 'failed to format output with a user defined function, original error message is \'catchme\'.' 
].join ' ' - describe 'enum', -> + describe 'constant', -> they 'yaml', ({ssh}) -> nikita @@ -46,18 +47,19 @@ describe 'actions.execute.config.format', -> stdout.should.eql 'key: value\n' data.should.eql key: "value" - they 'with error', ({ssh}) -> + they 'json with error', ({ssh}) -> nikita $ssh: ssh , -> - {stdout, data} = await @execute + {stdout, stderr, data} = await @execute command: 'exit 1' format: 'json' code: [, 1] stdout.should.eql '' + stderr.should.eql '' should.not.exist(data) - they 'parsing error', ({ssh}) -> + they 'json parsing error', ({ssh}) -> nikita $ssh: ssh , -> @@ -69,4 +71,16 @@ describe 'actions.execute.config.format', -> 'failed to parse output, format is "json",' 'original error message is "Unexpected token \'i\', \\"invalid\\n\\" is not valid JSON".' ].join ' ' + + they 'jsonline empty', ({ssh}) -> + nikita + $ssh: ssh + , -> + {stdout, stderr, data} = await @execute + command: 'echo -n ""' + format: 'jsonlines' + bash: true + stdout.should.eql '' + stderr.should.eql '' + data.should.eql [] diff --git a/packages/core/test/actions/execute/config.stdio.coffee b/packages/core/test/actions/execute/config.stdio.coffee index 81b0cd433..1dbfc2e8e 100644 --- a/packages/core/test/actions/execute/config.stdio.coffee +++ b/packages/core/test/actions/execute/config.stdio.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.stdio', -> - return unless tags.posix + return unless test.tags.posix it 'invalid', -> nikita.execute diff --git a/packages/core/test/actions/execute/config.sudo.coffee b/packages/core/test/actions/execute/config.sudo.coffee index 2cf2d8fc8..fb3d2ebab 100644 --- a/packages/core/test/actions/execute/config.sudo.coffee +++ b/packages/core/test/actions/execute/config.sudo.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.config.sudo', -> - return unless tags.sudo + return unless test.tags.sudo they 'execute.assert', ({ssh}) -> nikita diff --git a/packages/core/test/actions/execute/index.coffee b/packages/core/test/actions/execute/index.coffee index 840eab328..0525c9ee4 100644 --- a/packages/core/test/actions/execute/index.coffee +++ b/packages/core/test/actions/execute/index.coffee @@ -1,11 +1,12 @@ -stream = require 'stream' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import stream from 'node:stream' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute', -> - return unless tags.posix + return unless test.tags.posix describe 'config `command`', -> @@ -54,8 +55,6 @@ describe 'actions.execute', -> out._write = (chunk, encoding, callback) -> data += chunk.toString() callback() - search1 = 'search_toto' - search2 = 'search_lulu' unpiped = 0 out.on 'unpipe', -> unpiped++ @@ -63,16 +62,23 @@ describe 'actions.execute', -> false.should.be.true() await nikita $ssh: ssh - , (->) - .execute - command: "cat #{__filename} | grep #{search1}" - stdout: out - .execute - command: "cat 
#{__filename} | grep #{search2}" - stdout: out + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @fs.base.writeFile + content: ''' + Test search_1. + Test search_2. + ''' + target: "#{tmpdir}/a_file" + await @execute + command: "cat #{tmpdir}/a_file | grep search_1" + stdout: out + await @execute + command: "cat #{tmpdir}/a_file | grep search_2" + stdout: out unpiped.should.eql 2 - data.should.containEql search1 - data.should.containEql search2 + data.should.containEql 'search_1' + data.should.containEql 'search_2' they 'stdout and stderr return empty on command error', ({ssh}) -> nikita @@ -87,29 +93,31 @@ describe 'actions.execute', -> describe 'trim', -> they 'both stdout and stderr', ({ssh}) -> - nikita $ssh: ssh, -> - @execute - command: """ - echo ' bonjour ' - echo ' monde ' >&2 - """ - trim: true - .should.be.finally.containEql - stdout: 'bonjour' - stderr: 'monde' + nikita + $ssh: ssh + .execute + command: """ + echo ' bonjour ' + echo ' monde ' >&2 + """ + trim: true + .should.be.finally.containEql + stdout: 'bonjour' + stderr: 'monde' they 'with trim_stdout and trim_stderr', ({ssh}) -> - nikita $ssh: ssh, -> - @execute - command: """ - echo ' bonjour ' - echo ' monde ' >&2 - """ - stdout_trim: true - stderr_trim: true - .should.be.finally.containEql - stdout: 'bonjour' - stderr: 'monde' + nikita + $ssh: ssh + .execute + command: """ + echo ' bonjour ' + echo ' monde ' >&2 + """ + stdout_trim: true + stderr_trim: true + .should.be.finally.containEql + stdout: 'bonjour' + stderr: 'monde' describe 'log', -> @@ -152,12 +160,12 @@ describe 'actions.execute', -> they 'trap on error', ({ssh}) -> nikita $ssh: ssh, -> - @execute + await @execute command: """ sh -c '>&2 echo "exit 2' echo 'ok' """ - @execute + await @execute command: """ sh -c '>&2 echo "exit 2' echo 'ok' @@ -168,31 +176,33 @@ describe 'actions.execute', -> describe 'error', -> they 'provide `stdout` and `stderr`', ({ssh}) -> - nikita $ssh: ssh, -> - @execute - command: """ - sh -c '>&2 echo "Some Error"; exit 2' - """ - .should.be.rejectedWith - code: 'NIKITA_EXECUTE_EXIT_CODE_INVALID' - message: [ - 'NIKITA_EXECUTE_EXIT_CODE_INVALID: an unexpected exit code was encountered,' - 'command is "sh -c \'>&2 echo \\"Some Error\\"; exit 2\'",' - 'got 2 instead of {"true":[0],"false":[]}.' - ].join ' ' - command: 'sh -c \'>&2 echo "Some Error"; exit 2\'' - exit_code: 2 - stdout: '' - stderr: 'Some Error\n' - $status: false + nikita + $ssh: ssh + .execute + command: """ + sh -c '>&2 echo "Some Error"; exit 2' + """ + .should.be.rejectedWith + code: 'NIKITA_EXECUTE_EXIT_CODE_INVALID' + message: [ + 'NIKITA_EXECUTE_EXIT_CODE_INVALID: an unexpected exit code was encountered,' + 'command is "sh -c \'>&2 echo \\"Some Error\\"; exit 2\'",' + 'got 2 instead of {"true":[0],"false":[]}.' 
+ ].join ' ' + command: 'sh -c \'>&2 echo "Some Error"; exit 2\'' + exit_code: 2 + stdout: '' + stderr: 'Some Error\n' + $status: false describe 'dry', -> they 'dont execute the command', ({ssh}) -> - res = await nikita.execute - command: "exit 1" - dry: true - res.should.match + ( + await nikita.execute + command: "exit 1" + dry: true + ).should.match stdout: [] stderr: [] code: null diff --git a/packages/core/test/actions/execute/wait.coffee b/packages/core/test/actions/execute/wait.coffee index 89c872d25..e8c448cdd 100644 --- a/packages/core/test/actions/execute/wait.coffee +++ b/packages/core/test/actions/execute/wait.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.execute.wait', -> - return unless tags.posix + return unless test.tags.posix they 'single command, status false', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/assert.coffee b/packages/core/test/actions/fs/assert.coffee index 516656ba3..0d361ccf2 100644 --- a/packages/core/test/actions/fs/assert.coffee +++ b/packages/core/test/actions/fs/assert.coffee @@ -1,12 +1,12 @@ -fs = require 'fs' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import fs from 'node:fs' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.assert', -> + return unless test.tags.posix describe 'schema', -> diff --git a/packages/core/test/actions/fs/base/chmod.coffee b/packages/core/test/actions/fs/base/chmod.coffee index e32c60843..42698c523 100644 --- a/packages/core/test/actions/fs/base/chmod.coffee +++ b/packages/core/test/actions/fs/base/chmod.coffee @@ -1,13 +1,14 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.chmod', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'absolute mode', -> nikita.fs.base.chmod @@ -31,7 +32,7 @@ describe 'actions.fs.base.chmod', -> config.mode.should.eql 0o0744 describe 'usage', -> - return unless tags.posix + return unless test.tags.posix they 'create', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/chown.coffee b/packages/core/test/actions/fs/base/chown.coffee index 966922ae7..f8f76b323 100644 --- a/packages/core/test/actions/fs/base/chown.coffee +++ b/packages/core/test/actions/fs/base/chown.coffee @@ -1,13 +1,14 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.chown', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'id integers', -> nikita.fs.base.chown @@ -37,7 +38,7 @@ describe 'actions.fs.base.chown', -> config.gid.should.eql 5678 
describe 'usage', -> - return unless tags.chown + return unless test.tags.chown they 'pass id integers', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/copy.coffee b/packages/core/test/actions/fs/base/copy.coffee index 63a5abe2b..c2e951177 100644 --- a/packages/core/test/actions/fs/base/copy.coffee +++ b/packages/core/test/actions/fs/base/copy.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.copy', -> + return unless test.tags.posix they 'a file to a directory', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/createReadStream.coffee b/packages/core/test/actions/fs/base/createReadStream.coffee index 93578790d..66e3a1ecf 100644 --- a/packages/core/test/actions/fs/base/createReadStream.coffee +++ b/packages/core/test/actions/fs/base/createReadStream.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.createReadStream', -> + return unless test.tags.posix they 'option on_readable', ({ssh}) -> buffers = [] diff --git a/packages/core/test/actions/fs/base/createReadStream.sudo.coffee b/packages/core/test/actions/fs/base/createReadStream.sudo.coffee index 621cfc6b0..697e2a1cc 100644 --- a/packages/core/test/actions/fs/base/createReadStream.sudo.coffee +++ b/packages/core/test/actions/fs/base/createReadStream.sudo.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) -exec = require 'ssh2-exec/promise' - -return unless tags.sudo +import exec from 'ssh2-exec/promises' +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.createReadStream.sudo', -> + return unless test.tags.sudo they 'read file', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/createWriteStream.coffee b/packages/core/test/actions/fs/base/createWriteStream.coffee index a7ad697d8..272f0d485 100644 --- a/packages/core/test/actions/fs/base/createWriteStream.coffee +++ b/packages/core/test/actions/fs/base/createWriteStream.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.createWriteStream', -> + return unless test.tags.posix describe 'validation', -> diff --git a/packages/core/test/actions/fs/base/createWriteStream.sudo.coffee b/packages/core/test/actions/fs/base/createWriteStream.sudo.coffee index 4a532550f..9ffc0a6b0 100644 --- a/packages/core/test/actions/fs/base/createWriteStream.sudo.coffee +++ b/packages/core/test/actions/fs/base/createWriteStream.sudo.coffee @@ -1,45 +1,45 @@ -nikita = require '../../../../lib' -utils = require 
'../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) -exec = require 'ssh2-exec/promise' - -return unless tags.sudo +import exec from 'ssh2-exec/promises' +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.createWriteStream.sudo', -> + return unless test.tags.sudo - they 'write a file', ({ssh}) -> - nikita - $ssh: ssh - $sudo: true - $tmpdir: true - , ({metadata: {tmpdir}, ssh}) -> - await @fs.base.createWriteStream - target: "#{tmpdir}/a_file" - stream: (ws) -> - ws.write 'hello' - ws.end() - exec ssh, "sudo cat #{tmpdir}/a_file" - .should.be.finally.match stdout: 'hello' + they 'write a file', ({ssh}) -> + nikita + $ssh: ssh + $sudo: true + $tmpdir: true + , ({metadata: {tmpdir}, ssh}) -> + await @fs.base.createWriteStream + target: "#{tmpdir}/a_file" + stream: (ws) -> + ws.write 'hello' + ws.end() + exec ssh, "sudo cat #{tmpdir}/a_file" + .should.be.finally.match stdout: 'hello' - they 'append a file', ({ssh}) -> - nikita - $ssh: ssh - $sudo: true - $tmpdir: true - , ({metadata: {tmpdir}, ssh}) -> - await @fs.base.createWriteStream - target: "#{tmpdir}/a_file" - stream: (ws) -> - ws.write 'hello' - ws.end() - await @fs.base.createWriteStream - flags: 'a' - target: "#{tmpdir}/a_file" - stream: (ws) -> - ws.write '...nikita' - ws.end() - exec ssh, "sudo cat #{tmpdir}/a_file" - .should.be.finally.match stdout: 'hello...nikita' - + they 'append a file', ({ssh}) -> + nikita + $ssh: ssh + $sudo: true + $tmpdir: true + , ({metadata: {tmpdir}, ssh}) -> + await @fs.base.createWriteStream + target: "#{tmpdir}/a_file" + stream: (ws) -> + ws.write 'hello' + ws.end() + await @fs.base.createWriteStream + flags: 'a' + target: "#{tmpdir}/a_file" + stream: (ws) -> + ws.write '...nikita' + ws.end() + exec ssh, "sudo cat #{tmpdir}/a_file" + .should.be.finally.match stdout: 'hello...nikita' + diff --git a/packages/core/test/actions/fs/base/exists.coffee b/packages/core/test/actions/fs/base/exists.coffee index 2855d4008..a142c22aa 100644 --- a/packages/core/test/actions/fs/base/exists.coffee +++ b/packages/core/test/actions/fs/base/exists.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.exists', -> + return unless test.tags.posix they 'does not exists', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/lstat.coffee b/packages/core/test/actions/fs/base/lstat.coffee index a5a6b1f52..744fcaffd 100644 --- a/packages/core/test/actions/fs/base/lstat.coffee +++ b/packages/core/test/actions/fs/base/lstat.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.lstat', -> + return unless test.tags.posix they 'with a file link', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/mkdir.coffee 
b/packages/core/test/actions/fs/base/mkdir.coffee index 399f9ed1d..15241b2d4 100644 --- a/packages/core/test/actions/fs/base/mkdir.coffee +++ b/packages/core/test/actions/fs/base/mkdir.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.mkdir', -> + return unless test.tags.posix they 'a new directory', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/readFile.coffee b/packages/core/test/actions/fs/base/readFile.coffee index bf8b9b7d6..b9a34ce9a 100644 --- a/packages/core/test/actions/fs/base/readFile.coffee +++ b/packages/core/test/actions/fs/base/readFile.coffee @@ -1,12 +1,12 @@ -fs = require 'ssh2-fs' -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import fs from 'ssh2-fs' +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.readFile', -> + return unless test.tags.posix they 'argument `target`', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/readdir.coffee b/packages/core/test/actions/fs/base/readdir.coffee index ec6cd4810..78dee320f 100644 --- a/packages/core/test/actions/fs/base/readdir.coffee +++ b/packages/core/test/actions/fs/base/readdir.coffee @@ -1,13 +1,13 @@ -nikita = require '../../../../lib' -fs = require('fs').promises -{Dirent} = require 'fs' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import fs from 'node:fs/promises' +import {Dirent} from 'node:fs' +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.readdir', -> + return unless test.tags.posix it 'get native behavior', -> nikita diff --git a/packages/core/test/actions/fs/base/readlink.coffee b/packages/core/test/actions/fs/base/readlink.coffee index 458525b24..6aaee4523 100644 --- a/packages/core/test/actions/fs/base/readlink.coffee +++ b/packages/core/test/actions/fs/base/readlink.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.readlink', -> + return unless test.tags.posix they 'get value', ({ssh}) -> await nikita diff --git a/packages/core/test/actions/fs/base/rename.coffee b/packages/core/test/actions/fs/base/rename.coffee index 5b737d2a8..da77aebbc 100644 --- a/packages/core/test/actions/fs/base/rename.coffee +++ b/packages/core/test/actions/fs/base/rename.coffee @@ -1,12 +1,13 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless 
tags.posix describe 'actions.fs.base.rename', -> + return unless test.tags.posix they 'create', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/rmdir.coffee b/packages/core/test/actions/fs/base/rmdir.coffee index b46e187d7..ae443f190 100644 --- a/packages/core/test/actions/fs/base/rmdir.coffee +++ b/packages/core/test/actions/fs/base/rmdir.coffee @@ -1,12 +1,12 @@ -fs = require 'ssh2-fs' -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import fs from 'ssh2-fs' +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.rmdir', -> + return unless test.tags.posix they 'dir is removed', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/stat.coffee b/packages/core/test/actions/fs/base/stat.coffee index c4c32ff9c..77bf482d2 100644 --- a/packages/core/test/actions/fs/base/stat.coffee +++ b/packages/core/test/actions/fs/base/stat.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.stat', -> + return unless test.tags.posix they 'NIKITA_FS_STAT_TARGET_ENOENT target does not exists', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/symlink.coffee b/packages/core/test/actions/fs/base/symlink.coffee index 7c8bbdcf5..aa685aa21 100644 --- a/packages/core/test/actions/fs/base/symlink.coffee +++ b/packages/core/test/actions/fs/base/symlink.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.symlink', -> + return unless test.tags.posix they 'create', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/unlink.coffee b/packages/core/test/actions/fs/base/unlink.coffee index 5388fa3b4..bf9045a7f 100644 --- a/packages/core/test/actions/fs/base/unlink.coffee +++ b/packages/core/test/actions/fs/base/unlink.coffee @@ -1,12 +1,12 @@ -nikita = require '../../../../lib' -utils = require '../../../../lib/utils' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.unlink', -> + return unless test.tags.posix they 'a file', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/writeFile.coffee b/packages/core/test/actions/fs/base/writeFile.coffee index 7442894d2..f2a52fa31 100644 --- a/packages/core/test/actions/fs/base/writeFile.coffee +++ b/packages/core/test/actions/fs/base/writeFile.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - -return unless tags.posix 
+import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.writeFile', -> + return unless test.tags.posix they 'content is a string', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/base/writeFile.sudo.coffee b/packages/core/test/actions/fs/base/writeFile.sudo.coffee index 7b64818e7..b4d9fc982 100644 --- a/packages/core/test/actions/fs/base/writeFile.sudo.coffee +++ b/packages/core/test/actions/fs/base/writeFile.sudo.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../../lib' -{tags, config} = require '../../../test' -they = require('mocha-they')(config) - +import nikita from '@nikitajs/core' +import test from '../../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.base.writeFile.sudo', -> - return unless tags.sudo + return unless test.tags.sudo they 'owner is root', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/chmod.coffee b/packages/core/test/actions/fs/chmod.coffee index 62d8e5fbe..0a5eca08f 100644 --- a/packages/core/test/actions/fs/chmod.coffee +++ b/packages/core/test/actions/fs/chmod.coffee @@ -1,13 +1,14 @@ -nikita = require '../../../lib' -utils = require '../../../lib/utils' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.chmod', -> describe 'usage', -> - return unless tags.posix + return unless test.tags.posix they 'change a permission of a file', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/chown.coffee b/packages/core/test/actions/fs/chown.coffee index 8a8bffcea..910b77477 100644 --- a/packages/core/test/actions/fs/chown.coffee +++ b/packages/core/test/actions/fs/chown.coffee @@ -1,13 +1,14 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.chown', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'require target', -> nikita.fs.chown() @@ -21,7 +22,7 @@ describe 'actions.fs.chown', -> describe 'usage', -> - return unless tags.chown + return unless test.tags.chown they 'throw error if target does not exists', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/copy.coffee b/packages/core/test/actions/fs/copy.coffee index 0d3f59cee..1064944af 100644 --- a/packages/core/test/actions/fs/copy.coffee +++ b/packages/core/test/actions/fs/copy.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.copy', -> + return unless test.tags.posix describe 'api', -> diff --git a/packages/core/test/actions/fs/glob.coffee b/packages/core/test/actions/fs/glob.coffee index d04fe38c1..43964b704 100644 --- a/packages/core/test/actions/fs/glob.coffee +++ b/packages/core/test/actions/fs/glob.coffee @@ -1,12 +1,13 @@ -path = require 'path' -{Minimatch} = require 'minimatch' -nikita = require '../../../lib' -{tags, config} = require 
'../../test' -they = require('mocha-they')(config) +import path from 'node:path' +import minimatch from 'minimatch' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.glob', -> - return unless tags.posix + return unless test.tags.posix they 'argument is converted to target', ({ssh}) -> nikita @@ -52,7 +53,7 @@ describe 'actions.fs.glob', -> "#{tmpdir}/test/a_file" ] # Default behavior - (new Minimatch('/a_dir/**').match '/a_dir/').should.be.true() + (new minimatch.Minimatch('/a_dir/**').match '/a_dir/').should.be.true() they 'should match an extension patern', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/hash.coffee b/packages/core/test/actions/fs/hash.coffee index c4137fdc1..da533cabf 100644 --- a/packages/core/test/actions/fs/hash.coffee +++ b/packages/core/test/actions/fs/hash.coffee @@ -1,11 +1,12 @@ -crypto = require 'crypto' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import crypto from 'node:crypto' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.hash', -> - return unless tags.posix + return unless test.tags.posix they 'error if target does not exist', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/link.coffee b/packages/core/test/actions/fs/link.coffee index 3a3b80eb2..1ee61f12f 100644 --- a/packages/core/test/actions/fs/link.coffee +++ b/packages/core/test/actions/fs/link.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.link', -> + return unless test.tags.posix describe 'validation', -> @@ -39,12 +39,15 @@ describe 'actions.fs.link', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @fs.base.writeFile + content: 'hello' + target: "#{tmpdir}/source_file" {$status} = await @fs.link # Link does not exist - source: __filename + source: "#{tmpdir}/source_file" target: "#{tmpdir}/link_test" $status.should.be.true() {$status} = await @fs.link # Link already exists - source: __filename + source: "#{tmpdir}/source_file" target: "#{tmpdir}/link_test" $status.should.be.false() @fs.assert @@ -56,22 +59,25 @@ describe 'actions.fs.link', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @fs.base.writeFile + content: 'hello' + target: "#{tmpdir}/source_file" {$status} = await @fs.link - source: __filename - target: "#{tmpdir}/test" exec: true + source: "#{tmpdir}/source_file" + target: "#{tmpdir}/test" $status.should.be.true() {$status} = await @fs.link - source: __filename - target: "#{tmpdir}/test" exec: true + source: "#{tmpdir}/source_file" + target: "#{tmpdir}/test" $status.should.be.false() @fs.assert - target: "#{tmpdir}/test" content: """ #!/bin/bash - exec #{__filename} $@ + exec #{tmpdir}/source_file $@ """ + target: "#{tmpdir}/test" trim: true they 'should link dir', ({ssh}) -> @@ -80,13 +86,14 @@ describe 'actions.fs.link', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @fs.mkdir "#{tmpdir}/source_dir" {$status} = await @fs.link # Link does not exist - source: __dirname + source: "#{tmpdir}/source_dir" target: "#{tmpdir}/link_test" 
$status.should.be.true() {$status} = await @fs.link # Link already exists $ssh: ssh - source: __dirname + source: "#{tmpdir}/source_dir" target: "#{tmpdir}/link_test" $status.should.be.false() @fs.assert @@ -99,8 +106,9 @@ describe 'actions.fs.link', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @fs.mkdir "#{tmpdir}/source_dir" {$status} = await @fs.link - source: __dirname + source: "#{tmpdir}/source_dir" target: "#{tmpdir}/test/dir/link_test" $status.should.be.true() await @fs.assert @@ -108,12 +116,12 @@ describe 'actions.fs.link', -> type: 'symlink' {$status} = await @fs.link $ssh: ssh - source: "#{__dirname}/merge.coffee" + source: "#{tmpdir}/source_dir/merge.coffee" target: "#{tmpdir}/test/dir2/merge.coffee" $status.should.be.true() {$status} = await @fs.link $ssh: ssh - source: "#{__dirname}/mkdir.coffee" + source: "#{tmpdir}/source_dir/mkdir.coffee" target: "#{tmpdir}/test/dir2/mkdir.coffee" $status.should.be.true() diff --git a/packages/core/test/actions/fs/mkdir.coffee b/packages/core/test/actions/fs/mkdir.coffee index c0a0cb48c..9681f6e59 100644 --- a/packages/core/test/actions/fs/mkdir.coffee +++ b/packages/core/test/actions/fs/mkdir.coffee @@ -1,12 +1,13 @@ -path = require 'path' -nikita = require '../../../lib' -utils = require '../../../lib/utils' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import path from 'node:path' +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.mkdir', -> - return unless tags.posix + return unless test.tags.posix they 'argument', ({ssh}) -> nikita @@ -241,7 +242,7 @@ describe 'actions.fs.mkdir', -> ].join ' ' describe 'system.mkdir options uid/gid', -> - return unless tags.chown + return unless test.tags.chown they 'change owner uid/gid on creation', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/move.coffee b/packages/core/test/actions/fs/move.coffee index 22280b2f8..b727739d7 100644 --- a/packages/core/test/actions/fs/move.coffee +++ b/packages/core/test/actions/fs/move.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.move', -> + return unless test.tags.posix they 'error missing target', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/remove.coffee b/packages/core/test/actions/fs/remove.coffee index 448b8d573..120a4049e 100644 --- a/packages/core/test/actions/fs/remove.coffee +++ b/packages/core/test/actions/fs/remove.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.remove', -> + return unless test.tags.posix they 'accept an option', ({ssh}) -> nikita diff --git a/packages/core/test/actions/fs/wait.coffee b/packages/core/test/actions/fs/wait.coffee index bd1d03ad5..150a1d2f1 100644 --- a/packages/core/test/actions/fs/wait.coffee +++ b/packages/core/test/actions/fs/wait.coffee @@ -1,11 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = 
require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.fs.wait', -> + return unless test.tags.posix describe 'schema', -> diff --git a/packages/core/test/actions/ssh/close.coffee b/packages/core/test/actions/ssh/close.coffee index 377187938..fc2d9575a 100644 --- a/packages/core/test/actions/ssh/close.coffee +++ b/packages/core/test/actions/ssh/close.coffee @@ -1,11 +1,11 @@ -connect = require 'ssh2-connect' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config.filter( ({ssh}) -> !!ssh)) describe 'actions.ssh.close', -> - return unless tags.ssh + return unless test.tags.ssh describe 'provided connection', -> diff --git a/packages/core/test/actions/ssh/open.coffee b/packages/core/test/actions/ssh/open.coffee index ab9584a70..adb96a176 100644 --- a/packages/core/test/actions/ssh/open.coffee +++ b/packages/core/test/actions/ssh/open.coffee @@ -1,14 +1,15 @@ -connect = require 'ssh2-connect' -nikita = require '../../../lib' -utils = require '../../../lib/utils' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import connect from 'ssh2-connect' +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config.filter( ({ssh}) -> !!ssh)) describe 'actions.ssh.open', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api they 'config.host', ({ssh}) -> nikita @@ -16,7 +17,7 @@ describe 'actions.ssh.open', -> .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' describe 'usage', -> - return unless tags.ssh + return unless test.tags.ssh they 'from config', ({ssh}) -> nikita -> diff --git a/packages/core/test/actions/ssh/root.coffee b/packages/core/test/actions/ssh/root.coffee index 2a441ffd2..a6744536a 100644 --- a/packages/core/test/actions/ssh/root.coffee +++ b/packages/core/test/actions/ssh/root.coffee @@ -1,14 +1,15 @@ -connect = require 'ssh2-connect' -nikita = require '../../../lib' -utils = require '../../../lib/utils' -{tags, config} = require '../../test' +import connect from 'ssh2-connect' +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' # All test are executed with an ssh connection passed as an argument -they = require('mocha-they')(config.filter ({ssh}) -> !!ssh) +they = mochaThey(test.config.filter ({ssh}) -> !!ssh) -return unless tags.posix describe 'actions.ssh.root', -> + return unless test.tags.posix describe 'schema', -> diff --git a/packages/core/test/actions/wait.coffee b/packages/core/test/actions/wait.coffee index 7eee58648..5bfebe38b 100644 --- a/packages/core/test/actions/wait.coffee +++ b/packages/core/test/actions/wait.coffee @@ -1,10 +1,11 @@ -nikita = require '../../lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'actions.wait', -> - return unless tags.api + return unless test.tags.api describe 'time', -> diff --git a/packages/core/test/loaders/all.js 
b/packages/core/test/loaders/all.js new file mode 100644 index 000000000..344154640 --- /dev/null +++ b/packages/core/test/loaders/all.js @@ -0,0 +1,16 @@ + +import * as coffee from './coffee.js' +import * as ts from 'ts-node/esm' + +const coffeeRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/; +const tsRegex = /\.ts$/; + +export function load(url, context, next) { + if (coffeeRegex.test(url)) { + return coffee.load.apply(this, arguments) + } + if (tsRegex.test(url)) { + return ts.load.apply(this, arguments) + } + return next(url, context, next); +} diff --git a/packages/core/test/loaders/coffee.js b/packages/core/test/loaders/coffee.js new file mode 100644 index 000000000..c7b277cb7 --- /dev/null +++ b/packages/core/test/loaders/coffee.js @@ -0,0 +1,20 @@ +import CoffeeScript from 'coffeescript'; + +// See https://github.com/nodejs/node/issues/36396 +const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/; + +export async function load(url, context, next) { + if (extensionsRegex.test(url)) { + const format = 'module'; + const { source: rawSource } = await next(url, { format }); + const source = CoffeeScript.compile(rawSource.toString(), { + bare: true, + inlineMap: true, + filename: url, + header: false, + sourceMap: false, + }); + return {format, source}; + } + return next(url, context); +} diff --git a/packages/core/test/plugins/args.coffee b/packages/core/test/plugins/args.coffee index f0d565fef..5e2ac1c8c 100644 --- a/packages/core/test/plugins/args.coffee +++ b/packages/core/test/plugins/args.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import nikita from '@nikitajs/core' +import test from '../test.coffee' describe 'plugins.args', -> - return unless tags.api + return unless test.tags.api it 'argument is a function', -> nikita.call ({args}) -> diff --git a/packages/core/test/plugins/assertions/assert.coffee b/packages/core/test/plugins/assertions/assert.coffee index 27e8242d6..80e25538a 100644 --- a/packages/core/test/plugins/assertions/assert.coffee +++ b/packages/core/test/plugins/assertions/assert.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugin.assertions assert', -> - return unless tags.api + return unless test.tags.api describe 'array', -> diff --git a/packages/core/test/plugins/assertions/assert_exists.coffee b/packages/core/test/plugins/assertions/assert_exists.coffee index f59937bbb..847f51995 100644 --- a/packages/core/test/plugins/assertions/assert_exists.coffee +++ b/packages/core/test/plugins/assertions/assert_exists.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.assertions assert_exists', -> - return unless tags.posix + return unless test.tags.posix they 'success if file exists', ({ssh}) -> nikita diff --git a/packages/core/test/plugins/assertions/unassert.coffee b/packages/core/test/plugins/assertions/unassert.coffee index 9cbeb9402..eb3a7346a 100644 --- a/packages/core/test/plugins/assertions/unassert.coffee +++ b/packages/core/test/plugins/assertions/unassert.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugin.assertions unassert', 
-> - return unless tags.api + return unless test.tags.api describe 'array', -> diff --git a/packages/core/test/plugins/assertions/unassert_exists.coffee b/packages/core/test/plugins/assertions/unassert_exists.coffee index 38d49f067..32f0df159 100644 --- a/packages/core/test/plugins/assertions/unassert_exists.coffee +++ b/packages/core/test/plugins/assertions/unassert_exists.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.assertions unassert_exists', -> - return unless tags.posix + return unless test.tags.posix they 'success if no file exists', ({ssh}) -> nikita diff --git a/packages/core/test/plugins/conditions/if.coffee b/packages/core/test/plugins/conditions/if.coffee index 8d5d51ebe..aea593bdc 100644 --- a/packages/core/test/plugins/conditions/if.coffee +++ b/packages/core/test/plugins/conditions/if.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugin.conditions if', -> - return unless tags.api + return unless test.tags.api describe 'array', -> diff --git a/packages/core/test/plugins/conditions/if_execute.coffee b/packages/core/test/plugins/conditions/if_execute.coffee index 950952232..639a9e598 100644 --- a/packages/core/test/plugins/conditions/if_execute.coffee +++ b/packages/core/test/plugins/conditions/if_execute.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.conditions if_execute', -> - return unless tags.posix + return unless test.tags.posix they 'pass if string command is successfull', ({ssh}) -> {$status} = await nikita diff --git a/packages/core/test/plugins/conditions/if_exists.coffee b/packages/core/test/plugins/conditions/if_exists.coffee index 6affd46ec..f88e75d98 100644 --- a/packages/core/test/plugins/conditions/if_exists.coffee +++ b/packages/core/test/plugins/conditions/if_exists.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.conditions if_exists', -> - return unless tags.posix + return unless test.tags.posix describe 'array', -> diff --git a/packages/core/test/plugins/conditions/if_os.coffee b/packages/core/test/plugins/conditions/if_os.coffee index fe497d442..2b4e6b873 100644 --- a/packages/core/test/plugins/conditions/if_os.coffee +++ b/packages/core/test/plugins/conditions/if_os.coffee @@ -1,17 +1,18 @@ -nikita = require '../../../lib' -{tags, config, conditions_if_os} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.conditions if_os', -> - return unless tags.conditions_if_os + return unless test.tags.conditions_if_os they 'match distribution string', ({ssh}) -> nikita $ssh: ssh , -> {$status} = await @call - $if_os: distribution: 
conditions_if_os.distribution + $if_os: distribution: test.conditions_if_os.distribution $handler: -> true $status.should.be.true() {$status} = await @call @@ -22,7 +23,7 @@ describe 'plugin.conditions if_os', -> they 'match distribution array', ({ssh}) -> {$status} = await nikita - $if_os: distribution: [conditions_if_os.distribution, 'invalid'] + $if_os: distribution: [test.conditions_if_os.distribution, 'invalid'] $handler: -> true $ssh: ssh $status.should.be.true() @@ -30,8 +31,8 @@ describe 'plugin.conditions if_os', -> they 'match distribution string and version string', ({ssh}) -> {$status} = await nikita $if_os: - distribution: conditions_if_os.distribution - version: conditions_if_os.version + distribution: test.conditions_if_os.distribution + version: test.conditions_if_os.version $handler: -> true $ssh: ssh $status.should.be.true() @@ -40,10 +41,10 @@ describe 'plugin.conditions if_os', -> nikita $ssh: ssh , -> - {stdout: conditions_if_os.linux_version} = await @execute 'uname -r', trim: true unless conditions_if_os.linux_version + {stdout: test.conditions_if_os.linux_version} = await @execute 'uname -r', trim: true unless test.conditions_if_os.linux_version {$status} = await @call $if_os: - linux_version: conditions_if_os.linux_version + linux_version: test.conditions_if_os.linux_version $handler: -> true $status.should.be.true() @@ -51,13 +52,13 @@ describe 'plugin.conditions if_os', -> nikita $ssh: ssh , -> - {stdout: conditions_if_os.linux_version} = await @execute 'uname -r', trim: true unless conditions_if_os.linux_version + {stdout: test.conditions_if_os.linux_version} = await @execute 'uname -r', trim: true unless test.conditions_if_os.linux_version # Arch Linux has only linux_version - if conditions_if_os.version - then condition = version: conditions_if_os.version - else condition = linux_version: conditions_if_os.linux_version + if test.conditions_if_os.version + then condition = version: test.conditions_if_os.version + else condition = linux_version: test.conditions_if_os.linux_version {$status} = await @call - $if_os: {...condition, distribution: conditions_if_os.distribution} + $if_os: {...condition, distribution: test.conditions_if_os.distribution} $handler: -> true $status.should.be.true() @@ -65,37 +66,37 @@ describe 'plugin.conditions if_os', -> nikita $ssh: ssh , -> - {stdout: conditions_if_os.linux_version} = await @execute 'uname -r', trim: true unless conditions_if_os.linux_version + {stdout: test.conditions_if_os.linux_version} = await @execute 'uname -r', trim: true unless test.conditions_if_os.linux_version {$status} = await @call $if_os: - linux_version: conditions_if_os.linux_version.split('.')[0] + linux_version: test.conditions_if_os.linux_version.split('.')[0] $handler: -> true $status.should.be.true() they 'match arch string', ({ssh}) -> {$status} = await nikita $if_os: - arch: conditions_if_os.arch + arch: test.conditions_if_os.arch $handler: -> true $ssh: ssh $status.should.be.true() they 'match distribution string, version string, Linux version string and arch string', ({ssh}) -> {$status} = await nikita - $if_os: conditions_if_os + $if_os: test.conditions_if_os $handler: -> true $ssh: ssh $status.should.be.true() they 'match array', ({ssh}) -> # Arch Linux has only linux_version - if conditions_if_os.version - then condition = [version: conditions_if_os.version] - else condition = [linux_version: conditions_if_os.linux_version] + if test.conditions_if_os.version + then condition = [version: test.conditions_if_os.version] + else condition = 
[linux_version: test.conditions_if_os.linux_version] condition.push 8 {$status} = await nikita $if_os: [ - distribution: conditions_if_os.distribution + distribution: test.conditions_if_os.distribution , condition ] diff --git a/packages/core/test/plugins/conditions/index.coffee b/packages/core/test/plugins/conditions/index.coffee index 994fa5810..32ab80292 100644 --- a/packages/core/test/plugins/conditions/index.coffee +++ b/packages/core/test/plugins/conditions/index.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugin.condition', -> - return unless tags.api + return unless test.tags.api it 'normalize', -> nikita -> diff --git a/packages/core/test/plugins/conditions/unless.coffee b/packages/core/test/plugins/conditions/unless.coffee index 3db5957d9..7b9b93c21 100644 --- a/packages/core/test/plugins/conditions/unless.coffee +++ b/packages/core/test/plugins/conditions/unless.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import test from '../../test.coffee' +import nikita from '@nikitajs/core' describe 'plugin.conditions unless', -> - return unless tags.api + return unless test.tags.api describe 'array', -> diff --git a/packages/core/test/plugins/conditions/unless_execute.coffee b/packages/core/test/plugins/conditions/unless_execute.coffee index 763dfed9c..ee639d058 100644 --- a/packages/core/test/plugins/conditions/unless_execute.coffee +++ b/packages/core/test/plugins/conditions/unless_execute.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.conditions unless_execute', -> - return unless tags.posix + return unless test.tags.posix they 'skip if string command is successfull', ({ssh}) -> {$status, value} = await nikita diff --git a/packages/core/test/plugins/conditions/unless_exists.coffee b/packages/core/test/plugins/conditions/unless_exists.coffee index 9fa46f74d..4621771cd 100644 --- a/packages/core/test/plugins/conditions/unless_exists.coffee +++ b/packages/core/test/plugins/conditions/unless_exists.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.conditions unless_exists', -> - return unless tags.posix + return unless test.tags.posix describe 'array', -> diff --git a/packages/core/test/plugins/conditions/unless_os.coffee b/packages/core/test/plugins/conditions/unless_os.coffee index 1dc737226..bd9c61039 100644 --- a/packages/core/test/plugins/conditions/unless_os.coffee +++ b/packages/core/test/plugins/conditions/unless_os.coffee @@ -1,17 +1,18 @@ -nikita = require '../../../lib' -{tags, config, conditions_if_os} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugin.conditions unless_os', -> - return unless tags.conditions_if_os + return unless test.tags.conditions_if_os they 'match distribution string', ({ssh}) -> nikita ssh: ssh , -> {$status} = await @call - $unless_os: 
distribution: conditions_if_os.distribution + $unless_os: distribution: test.conditions_if_os.distribution $handler: -> true $status.should.be.false() {$status} = await @call @@ -29,7 +30,7 @@ describe 'plugin.conditions unless_os', -> they 'match distribution string and version string', ({ssh}) -> # Arch Linux only has linux_version - if conditions_if_os.version + if test.conditions_if_os.version then condition = version: '1' else condition = linux_version: '1' {$status} = await nikita @@ -48,7 +49,7 @@ describe 'plugin.conditions unless_os', -> they 'match distribution string, version string, Linux version string and arch string', ({ssh}) -> {$status} = await nikita - $unless_os: conditions_if_os + $unless_os: test.conditions_if_os $handler: -> true $ssh: ssh $status.should.be.false() diff --git a/packages/core/test/plugins/execute.sudo.coffee b/packages/core/test/plugins/execute.sudo.coffee index 95fee50d8..069b96507 100644 --- a/packages/core/test/plugins/execute.sudo.coffee +++ b/packages/core/test/plugins/execute.sudo.coffee @@ -1,10 +1,11 @@ -nikita = require '../../lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugins.execute.sudo', -> - return unless tags.sudo + return unless test.tags.sudo they 'readFile without sudo', ({ssh}) -> nikita diff --git a/packages/core/test/plugins/global.coffee b/packages/core/test/plugins/global.coffee index dd8f68729..c2417fba1 100644 --- a/packages/core/test/plugins/global.coffee +++ b/packages/core/test/plugins/global.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import nikita from '@nikitajs/core' +import test from '../test.coffee' describe 'plugins.global', -> - return unless tags.api + return unless test.tags.api it 'merge from root', -> nikita diff --git a/packages/core/test/plugins/history.coffee b/packages/core/test/plugins/history.coffee index 115fbfd92..ac48e1249 100644 --- a/packages/core/test/plugins/history.coffee +++ b/packages/core/test/plugins/history.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import nikita from '@nikitajs/core' +import test from '../test.coffee' describe 'plugins.history', -> - return unless tags.api + return unless test.tags.api describe 'children', -> diff --git a/packages/core/test/plugins/magic_dollar.coffee b/packages/core/test/plugins/magic_dollar.coffee index dc8a01a1d..98bb4c50d 100644 --- a/packages/core/test/plugins/magic_dollar.coffee +++ b/packages/core/test/plugins/magic_dollar.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import nikita from '@nikitajs/core' +import test from '../test.coffee' describe 'plugins.magic_dollar', -> - return unless tags.api + return unless test.tags.api it 'extract metadata', -> metadata = await nikita diff --git a/packages/core/test/plugins/metadata/argument.coffee b/packages/core/test/plugins/metadata/argument.coffee index 101557910..44724e961 100644 --- a/packages/core/test/plugins/metadata/argument.coffee +++ b/packages/core/test/plugins/metadata/argument.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.argument (native)', -> - return unless tags.api + return unless test.tags.api describe 'usage', -> diff --git 
a/packages/core/test/plugins/metadata/argument_to_config.coffee b/packages/core/test/plugins/metadata/argument_to_config.coffee index d6c6d2f5f..0278f43a4 100644 --- a/packages/core/test/plugins/metadata/argument_to_config.coffee +++ b/packages/core/test/plugins/metadata/argument_to_config.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.argument', -> - return unless tags.api + return unless test.tags.api it 'validate schema', -> nikita diff --git a/packages/core/test/plugins/metadata/attempt.coffee b/packages/core/test/plugins/metadata/attempt.coffee index 31b2d6bbf..420df3845 100644 --- a/packages/core/test/plugins/metadata/attempt.coffee +++ b/packages/core/test/plugins/metadata/attempt.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.attempt (plugin.retry)', -> - return unless tags.api + return unless test.tags.api describe 'validation', -> diff --git a/packages/core/test/plugins/metadata/debug.coffee b/packages/core/test/plugins/metadata/debug.coffee index a5386a9bb..fa14adad4 100644 --- a/packages/core/test/plugins/metadata/debug.coffee +++ b/packages/core/test/plugins/metadata/debug.coffee @@ -1,10 +1,10 @@ -{tags} = require '../../test' -stream = require 'stream' -nikita = require '../../../lib' +import stream from 'node:stream' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'metadata "debug"', -> - return unless tags.api + return unless test.tags.api describe 'validation', -> diff --git a/packages/core/test/plugins/metadata/definitions.coffee b/packages/core/test/plugins/metadata/definitions.coffee index 278e11842..a719eea94 100644 --- a/packages/core/test/plugins/metadata/definitions.coffee +++ b/packages/core/test/plugins/metadata/definitions.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.definitions', -> - return unless tags.api + return unless test.tags.api it 'expose ajv', -> nikita ({tools: {schema}}) -> diff --git a/packages/core/test/plugins/metadata/depth.coffee b/packages/core/test/plugins/metadata/depth.coffee index 9b0fccf05..a5020b3da 100644 --- a/packages/core/test/plugins/metadata/depth.coffee +++ b/packages/core/test/plugins/metadata/depth.coffee @@ -1,11 +1,10 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -registry = require '../../../lib/registry' -register = require '../../../lib/register' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../../test.coffee' describe 'plugins.metadata.depth', -> - return unless tags.api + return unless test.tags.api it 'start at 0', -> nikita ({metadata}) -> @@ -29,7 +28,8 @@ describe 'plugins.metadata.depth', -> key: "root value, depth #{metadata.depth}" {key} = await nikita() key.should.eql 'root value, depth 0' - registry.unregister [], register[''] + # registry.unregister [], register[''] + registry.unregister [] it 'start at depth 0 with action argument', -> {key} = await nikita ({metadata}) -> diff --git a/packages/core/test/plugins/metadata/disabled.coffee b/packages/core/test/plugins/metadata/disabled.coffee index e28cd46de..b39e60284 100644 --- a/packages/core/test/plugins/metadata/disabled.coffee +++ 
b/packages/core/test/plugins/metadata/disabled.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.disabled', -> - return unless tags.api + return unless test.tags.api it 'validate schema', -> nikita diff --git a/packages/core/test/plugins/metadata/execute.coffee b/packages/core/test/plugins/metadata/execute.coffee index 73f6d3993..36f8a05fa 100644 --- a/packages/core/test/plugins/metadata/execute.coffee +++ b/packages/core/test/plugins/metadata/execute.coffee @@ -1,10 +1,11 @@ -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugins.execute', -> - return unless tags.api + return unless test.tags.api describe 'usage', -> diff --git a/packages/core/test/plugins/metadata/header.coffee b/packages/core/test/plugins/metadata/header.coffee index 1f5b0a94a..dc7a18266 100644 --- a/packages/core/test/plugins/metadata/header.coffee +++ b/packages/core/test/plugins/metadata/header.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.header', -> - return unless tags.api + return unless test.tags.api it 'validate schema', -> nikita diff --git a/packages/core/test/plugins/metadata/index.coffee b/packages/core/test/plugins/metadata/index.coffee index ad0a22278..d5319e26d 100644 --- a/packages/core/test/plugins/metadata/index.coffee +++ b/packages/core/test/plugins/metadata/index.coffee @@ -1,11 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -registry = require '../../../lib/registry' -register = require '../../../lib/register' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.index', -> - return unless tags.api + return unless test.tags.api it 'start at 0', -> nikita diff --git a/packages/core/test/plugins/metadata/position.coffee b/packages/core/test/plugins/metadata/position.coffee index add7c96f6..b845acf4a 100644 --- a/packages/core/test/plugins/metadata/position.coffee +++ b/packages/core/test/plugins/metadata/position.coffee @@ -1,11 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -registry = require '../../../lib/registry' -register = require '../../../lib/register' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.position', -> - return unless tags.api + return unless test.tags.api it 'start at 0', -> nikita ({metadata}) -> diff --git a/packages/core/test/plugins/metadata/raw.coffee b/packages/core/test/plugins/metadata/raw.coffee index 59e7e7e0d..10b7aa311 100644 --- a/packages/core/test/plugins/metadata/raw.coffee +++ b/packages/core/test/plugins/metadata/raw.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.raw', -> - return unless tags.api + return unless test.tags.api it 'validate schema', -> nikita diff --git a/packages/core/test/plugins/metadata/raw_input.coffee b/packages/core/test/plugins/metadata/raw_input.coffee index 1ecb44842..a8f3fd2d1 100644 --- a/packages/core/test/plugins/metadata/raw_input.coffee +++ 
b/packages/core/test/plugins/metadata/raw_input.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.raw_input', -> - return unless tags.api + return unless test.tags.api it 'validate schema', -> nikita diff --git a/packages/core/test/plugins/metadata/raw_output.coffee b/packages/core/test/plugins/metadata/raw_output.coffee index 9a657a43a..f76371560 100644 --- a/packages/core/test/plugins/metadata/raw_output.coffee +++ b/packages/core/test/plugins/metadata/raw_output.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.raw_output', -> - return unless tags.api + return unless test.tags.api it 'validate schema', -> nikita diff --git a/packages/core/test/plugins/metadata/relax.coffee b/packages/core/test/plugins/metadata/relax.coffee index 33f8ba453..ddfbefb4f 100644 --- a/packages/core/test/plugins/metadata/relax.coffee +++ b/packages/core/test/plugins/metadata/relax.coffee @@ -1,10 +1,10 @@ -nikita = require '../../../lib' -{tags} = require '../../test' -err = require '../../../lib/utils/error' +import nikita from '@nikitajs/core' +import err from '@nikitajs/core/utils/error' +import test from '../../test.coffee' describe 'plugins.metadata.relax', -> - return unless tags.api + return unless test.tags.api it 'handler throw error', -> {error} = await nikita.call $relax: true, -> diff --git a/packages/core/test/plugins/metadata/retry.coffee b/packages/core/test/plugins/metadata/retry.coffee index 219563b9f..8a7e161ce 100644 --- a/packages/core/test/plugins/metadata/retry.coffee +++ b/packages/core/test/plugins/metadata/retry.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.retry', -> - return unless tags.api + return unless test.tags.api describe 'validation', -> diff --git a/packages/core/test/plugins/metadata/schema.coffee b/packages/core/test/plugins/metadata/schema.coffee index 069a2149f..0070d0268 100644 --- a/packages/core/test/plugins/metadata/schema.coffee +++ b/packages/core/test/plugins/metadata/schema.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.schema', -> - return unless tags.api + return unless test.tags.api it 'disabled when `false`', -> nikita diff --git a/packages/core/test/plugins/metadata/shy.coffee b/packages/core/test/plugins/metadata/shy.coffee index f33af5187..febee30dd 100644 --- a/packages/core/test/plugins/metadata/shy.coffee +++ b/packages/core/test/plugins/metadata/shy.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.shy', -> - return unless tags.api + return unless test.tags.api it 'dont alter status', -> {$status} = await nikita -> diff --git a/packages/core/test/plugins/metadata/sleep.coffee b/packages/core/test/plugins/metadata/sleep.coffee index 147e412c7..8d894bbfd 100644 --- a/packages/core/test/plugins/metadata/sleep.coffee +++ b/packages/core/test/plugins/metadata/sleep.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita 
from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.sleep (plugin.retry)', -> - return unless tags.api + return unless test.tags.api describe 'validation', -> diff --git a/packages/core/test/plugins/metadata/tmpdir.coffee b/packages/core/test/plugins/metadata/tmpdir.coffee index 0b2946c8c..c3ed20c4b 100644 --- a/packages/core/test/plugins/metadata/tmpdir.coffee +++ b/packages/core/test/plugins/metadata/tmpdir.coffee @@ -1,13 +1,14 @@ -path = require 'path' -os = require 'os' -fs = require 'ssh2-fs' -{tags, config} = require '../../test' -nikita = require '../../../lib' -they = require('mocha-they')(config) +import path from 'node:path' +import os from 'node:os' +import fs from 'ssh2-fs' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugins.metadata.tmpdir', -> - return unless tags.api + return unless test.tags.api describe 'validation', -> diff --git a/packages/core/test/plugins/metadata/tmpdir.sudo.coffee b/packages/core/test/plugins/metadata/tmpdir.sudo.coffee index 0defbb48c..5ebf3f132 100644 --- a/packages/core/test/plugins/metadata/tmpdir.sudo.coffee +++ b/packages/core/test/plugins/metadata/tmpdir.sudo.coffee @@ -1,13 +1,14 @@ -path = require 'path' -os = require 'os' -fs = require 'ssh2-fs' -{tags, config} = require '../../test' -nikita = require '../../../lib' -they = require('mocha-they')(config) +import path from 'node:path' +import os from 'node:os' +import fs from 'ssh2-fs' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugins.metadata.tmpdir', -> - return unless tags.sudo + return unless test.tags.sudo they 'root ownership', ({ssh}) -> nikita diff --git a/packages/core/test/plugins/metadata/uuid.coffee b/packages/core/test/plugins/metadata/uuid.coffee index e934880cd..f8e111ef8 100644 --- a/packages/core/test/plugins/metadata/uuid.coffee +++ b/packages/core/test/plugins/metadata/uuid.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.metadata.uuid', -> - return unless tags.api + return unless test.tags.api it 'in root action', -> nikita ({metadata: {uuid}}) -> diff --git a/packages/core/test/plugins/output/logs.coffee b/packages/core/test/plugins/output/logs.coffee index a5ca4bff9..91c0713c6 100644 --- a/packages/core/test/plugins/output/logs.coffee +++ b/packages/core/test/plugins/output/logs.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.output.logs', -> - return unless tags.api + return unless test.tags.api it 'return logs', -> nikita -> @@ -21,7 +21,7 @@ describe 'plugins.output.logs', -> true $logs.some (log) -> log.file.should.eql 'logs.coffee' - log.filename.should.eql __filename + log.filename.should.match /output\/logs\.coffee$/ it 'return logs in error', -> nikita -> diff --git a/packages/core/test/plugins/output/status.coffee b/packages/core/test/plugins/output/status.coffee index 54ed0167c..6279453d6 100644 --- a/packages/core/test/plugins/output/status.coffee +++ b/packages/core/test/plugins/output/status.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' 
describe 'plugins.output.status', -> - return unless tags.api + return unless test.tags.api describe 'returned coercion', -> diff --git a/packages/core/test/plugins/pubsub.coffee b/packages/core/test/plugins/pubsub.coffee index 96b8866b9..2a0150c62 100644 --- a/packages/core/test/plugins/pubsub.coffee +++ b/packages/core/test/plugins/pubsub.coffee @@ -1,12 +1,13 @@ -memory = require '../../lib/plugins/pubsub/engines/memory' -nikita = require '../../lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import memory from '@nikitajs/core/plugins/pubsub/engines/memory' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugins.pubsub', -> - return unless tags.api + return unless test.tags.api they 'set then get', ({ssh}) -> engine = memory() diff --git a/packages/core/test/plugins/ssh.coffee b/packages/core/test/plugins/ssh.coffee index 359d6ada8..b6676f16d 100644 --- a/packages/core/test/plugins/ssh.coffee +++ b/packages/core/test/plugins/ssh.coffee @@ -1,11 +1,12 @@ -nikita = require '../../lib' -utils = require '../../lib/utils' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config.filter ({ssh}) -> !!ssh) describe '`plugins.ssh`', -> - return unless tags.ssh + return unless test.tags.ssh describe 'from parent (action.ssh)', -> @@ -14,13 +15,13 @@ describe '`plugins.ssh`', -> utils.ssh.compare(conn, ssh).should.be.true() they 'from config in child action', ({ssh}) -> - nikita $ssh: ssh, ({ssh: conn}) -> - @call -> @call -> + nikita $ssh: ssh, -> + @call -> @call ({ssh: conn}) -> utils.ssh.compare(conn, ssh).should.be.true() they 'from connection', ({ssh}) -> {ssh: conn} = await nikita.ssh.open ssh - await nikita $ssh: conn, (action) -> + await nikita $ssh: conn, ({ssh: conn}) -> @call -> @call -> utils.ssh.compare(conn, ssh).should.be.true() nikita.ssh.close ssh: conn diff --git a/packages/core/test/plugins/templated.coffee b/packages/core/test/plugins/templated.coffee index b07b3f937..a2ca164d6 100644 --- a/packages/core/test/plugins/templated.coffee +++ b/packages/core/test/plugins/templated.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import test from '../test.coffee' +import nikita from '@nikitajs/core' describe 'plugins.templated', -> - return unless tags.api + return unless test.tags.api it 'access config', -> nikita diff --git a/packages/core/test/plugins/time.coffee b/packages/core/test/plugins/time.coffee index b045c1223..1df4fb64c 100644 --- a/packages/core/test/plugins/time.coffee +++ b/packages/core/test/plugins/time.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import test from '../test.coffee' +import nikita from '@nikitajs/core' describe 'plugins.time', -> - return unless tags.api + return unless test.tags.api it 'start and end time', -> nikita -> diff --git a/packages/core/test/plugins/tools/dig.coffee b/packages/core/test/plugins/tools/dig.coffee index 5afae060d..3d450c2e9 100644 --- a/packages/core/test/plugins/tools/dig.coffee +++ b/packages/core/test/plugins/tools/dig.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.dig', -> - return unless 
tags.api + return unless test.tags.api it 'root action', -> nikita a_key: 'a value', ({tools: {dig}}) -> diff --git a/packages/core/test/plugins/tools/events.coffee b/packages/core/test/plugins/tools/events.coffee index c982c6253..cc6b5ab7f 100644 --- a/packages/core/test/plugins/tools/events.coffee +++ b/packages/core/test/plugins/tools/events.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.events', -> - return unless tags.api + return unless test.tags.api it 'emit events', -> nikita ({tools: {events}}) -> diff --git a/packages/core/test/plugins/tools/find.coffee b/packages/core/test/plugins/tools/find.coffee index f31494490..f761736dc 100644 --- a/packages/core/test/plugins/tools/find.coffee +++ b/packages/core/test/plugins/tools/find.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.find', -> - return unless tags.api + return unless test.tags.api describe 'action', -> diff --git a/packages/core/test/plugins/tools/log.coffee b/packages/core/test/plugins/tools/log.coffee index b7544edf1..5b343ced9 100644 --- a/packages/core/test/plugins/tools/log.coffee +++ b/packages/core/test/plugins/tools/log.coffee @@ -1,10 +1,10 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -stream = require 'stream' +import stream from 'node:stream' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.log', -> - return unless tags.api + return unless test.tags.api describe 'events', -> @@ -14,7 +14,7 @@ describe 'plugins.tools.log', -> events.on 'text', (msg) -> resolve msg log message: 'getme' - .should.finally.containEql + .should.finally.match message: 'getme' level: 'INFO' index: 0 @@ -23,8 +23,8 @@ describe 'plugins.tools.log', -> type: 'text' depth: 0 file: 'log.coffee' - filename: __filename - line: 23 + filename: /.*log\.coffee/ + line: 20 it 'argument is immutable', -> arg = key: 'value' diff --git a/packages/core/test/plugins/tools/path.coffee b/packages/core/test/plugins/tools/path.coffee index f5526694a..ea81eb0f2 100644 --- a/packages/core/test/plugins/tools/path.coffee +++ b/packages/core/test/plugins/tools/path.coffee @@ -1,11 +1,12 @@ -p = require 'path' -nikita = require '../../../lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) +import p from 'node:path' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'plugins.tools.path', -> - return unless tags.posix + return unless test.tags.posix they 'ssh defined in current action', ({ssh}) -> nikita diff --git a/packages/core/test/plugins/tools/schema.boolean.coffee b/packages/core/test/plugins/tools/schema.boolean.coffee index 0152c7819..3962c3b32 100644 --- a/packages/core/test/plugins/tools/schema.boolean.coffee +++ b/packages/core/test/plugins/tools/schema.boolean.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.schema.boolean', -> - return unless tags.api + return unless test.tags.api definitions = config: type: 'object' diff --git a/packages/core/test/plugins/tools/schema.coffee b/packages/core/test/plugins/tools/schema.coffee index 
af1755b86..c329ac53b 100644 --- a/packages/core/test/plugins/tools/schema.coffee +++ b/packages/core/test/plugins/tools/schema.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.schema', -> - return unless tags.api + return unless test.tags.api describe 'usage', -> diff --git a/packages/core/test/plugins/tools/schema.instanceof.coffee b/packages/core/test/plugins/tools/schema.instanceof.coffee index a66bc5671..5a0d7ec55 100644 --- a/packages/core/test/plugins/tools/schema.instanceof.coffee +++ b/packages/core/test/plugins/tools/schema.instanceof.coffee @@ -1,10 +1,10 @@ -stream = require 'stream' -{tags} = require '../../test' -nikita = require '../../../lib' +import stream from 'node:stream' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.schema.instanceof', -> - return unless tags.api + return unless test.tags.api it 'Error with valid property', -> nikita ({registry}) -> diff --git a/packages/core/test/plugins/tools/schema.ref.coffee b/packages/core/test/plugins/tools/schema.ref.coffee index 1b4696f9f..e537028b3 100644 --- a/packages/core/test/plugins/tools/schema.ref.coffee +++ b/packages/core/test/plugins/tools/schema.ref.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.schema.$ref', -> - return unless tags.api + return unless test.tags.api it 'invalid ref definition', -> nikita @@ -91,18 +91,14 @@ describe '$ref with `module:` protocol', -> , (->) .should.be.rejectedWith code: 'NIKITA_SCHEMA_INVALID_MODULE' - message: [ - 'NIKITA_SCHEMA_INVALID_MODULE:' - 'the module location is not resolvable,' - 'module name is "invalid/action".' 
- ].join ' ' + message: /NIKITA_SCHEMA_INVALID_MODULE: the module location is not resolvable, module name is "invalid\/action", error message is ".*"\./ it 'valid ref location', -> nikita $tmpdir: true , ({metadata: {tmpdir}}) -> await @fs.base.writeFile - target: "#{tmpdir}/a_module" + target: "#{tmpdir}/a_module.js" content: ''' module.exports = { metadata: { @@ -125,7 +121,7 @@ describe '$ref with `module:` protocol', -> config: type: 'object' properties: - 'a_source': $ref: "module://#{tmpdir}/a_module#/definitions/config" + 'a_source': $ref: "module://#{tmpdir}/a_module.js#/definitions/config" a_source: an_integer: '123' , ({config}) -> config: config config.should.eql diff --git a/packages/core/test/plugins/tools/status.coffee b/packages/core/test/plugins/tools/status.coffee index d3effa2af..d0e2675b3 100644 --- a/packages/core/test/plugins/tools/status.coffee +++ b/packages/core/test/plugins/tools/status.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.status', -> - return unless tags.api + return unless test.tags.api it 'root', -> nikita ({tools: {status}}) -> diff --git a/packages/core/test/plugins/tools/walk.coffee b/packages/core/test/plugins/tools/walk.coffee index 08edf2263..9d9e3cca8 100644 --- a/packages/core/test/plugins/tools/walk.coffee +++ b/packages/core/test/plugins/tools/walk.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'plugins.tools.walk', -> - return unless tags.api + return unless test.tags.api describe 'action', -> diff --git a/packages/core/test/registry/create.coffee b/packages/core/test/registry/create.coffee index dbccd3236..8443bf20d 100644 --- a/packages/core/test/registry/create.coffee +++ b/packages/core/test/registry/create.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -registry = require '../../lib/registry' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'registry.create', -> - return unless tags.api + return unless test.tags.api it 'static', -> registry.create.should.be.a.Function() diff --git a/packages/core/test/registry/deprecate.coffee b/packages/core/test/registry/deprecate.coffee index 38b42822d..7b35ac8f9 100644 --- a/packages/core/test/registry/deprecate.coffee +++ b/packages/core/test/registry/deprecate.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import nikita from '@nikitajs/core' +import test from '../test.coffee' describe 'registry.deprecate', -> - return unless tags.api + return unless test.tags.api it.skip 'function handler without new name', -> nikita diff --git a/packages/core/test/registry/get.coffee b/packages/core/test/registry/get.coffee index da8ea7ffc..af61e0893 100644 --- a/packages/core/test/registry/get.coffee +++ b/packages/core/test/registry/get.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -registry = require '../../lib/registry' -{plugandplay} = require 'plug-and-play' +import {plugandplay} from 'plug-and-play' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'registry.get', -> - return unless tags.api + return unless test.tags.api describe 'get all', -> diff --git a/packages/core/test/registry/index.coffee b/packages/core/test/registry/index.coffee index ff1ca8688..e2df97109 100644 --- a/packages/core/test/registry/index.coffee 
+++ b/packages/core/test/registry/index.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -nikita = require '../../lib' -registry = require '../../lib/registry' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'registry', -> - return unless tags.api + return unless test.tags.api it 'statically', -> registry.register 'my_function', (->) diff --git a/packages/core/test/registry/register.coffee b/packages/core/test/registry/register.coffee index 209775949..9238cb558 100644 --- a/packages/core/test/registry/register.coffee +++ b/packages/core/test/registry/register.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -nikita = require '../../lib' -registry = require '../../lib/registry' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'registry.register', -> - return unless tags.api + return unless test.tags.api describe 'namespace', -> @@ -98,11 +98,11 @@ describe 'registry.register', -> it 'is a string, function style', -> # Room for improvement in the future nikita ({registry}) -> - await registry.register 'an_action', '@nikitajs/core/lib/actions/execute' + await registry.register 'an_action', '@nikitajs/core/actions/execute' result = await @registry.registered 'an_action' .should.resolvedWith true {metadata, config} = await @registry.get 'an_action' - metadata.module.should.eql '@nikitajs/core/lib/actions/execute' + metadata.module.should.eql '@nikitajs/core/actions/execute' should(config).be.undefined() it 'is a string, object style', -> @@ -110,17 +110,17 @@ describe 'registry.register', -> nikita ({registry}) -> await registry.register 'an_action': - '': '@nikitajs/core/lib/actions/execute' - 'child': '@nikitajs/core/lib/actions/execute' + '': '@nikitajs/core/actions/execute' + 'child': '@nikitajs/core/actions/execute' result = await @registry.registered 'an_action' .should.resolvedWith true result = await @registry.registered ['an_action', 'child'] .should.resolvedWith true {metadata, config} = await @registry.get 'an_action' - metadata.module.should.eql '@nikitajs/core/lib/actions/execute' + metadata.module.should.eql '@nikitajs/core/actions/execute' should(config).be.undefined() {metadata, config} = await @registry.get ['an_action', 'child'] - metadata.module.should.eql '@nikitajs/core/lib/actions/execute' + metadata.module.should.eql '@nikitajs/core/actions/execute' should(config).be.undefined() it.skip 'receive config', -> diff --git a/packages/core/test/registry/registered.coffee b/packages/core/test/registry/registered.coffee index cabf08364..7219bb7e9 100644 --- a/packages/core/test/registry/registered.coffee +++ b/packages/core/test/registry/registered.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -nikita = require '../../lib' -registry = require '../../lib/registry' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'registry.registered', -> - return unless tags.api + return unless test.tags.api describe 'global', -> diff --git a/packages/core/test/registry/unregister.coffee b/packages/core/test/registry/unregister.coffee index 999a0043b..1d3616bf3 100644 --- a/packages/core/test/registry/unregister.coffee +++ b/packages/core/test/registry/unregister.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -nikita = require '../../lib' +import nikita from '@nikitajs/core' +import test from '../test.coffee' describe 'registry.unregister', 
-> - return unless tags.api + return unless test.tags.api describe 'global', -> diff --git a/packages/core/test/session/action/config.coffee b/packages/core/test/session/action/config.coffee index 255ce1a1e..94fcedb8b 100644 --- a/packages/core/test/session/action/config.coffee +++ b/packages/core/test/session/action/config.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.handler.config', -> - return unless tags.api + return unless test.tags.api it 'ensure it is not polluted', -> nikita.call ({config}) -> diff --git a/packages/core/test/session/action/handler.coffee b/packages/core/test/session/action/handler.coffee index f0728e6cc..6fd55be10 100644 --- a/packages/core/test/session/action/handler.coffee +++ b/packages/core/test/session/action/handler.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.action.handler', -> - return unless tags.api + return unless test.tags.api describe 'root action', -> diff --git a/packages/core/test/session/contextualize.coffee b/packages/core/test/session/contextualize.coffee index 9f452a0f6..40d2e270c 100644 --- a/packages/core/test/session/contextualize.coffee +++ b/packages/core/test/session/contextualize.coffee @@ -1,10 +1,9 @@ -{tags} = require '../test' -contextualize = require '../../lib/session/contextualize' -normalize = require '../../lib/session/normalize' +import test from '../test.coffee' +import contextualize from '../../lib/session/contextualize.js' describe 'session.contextualize', -> - return unless tags.api + return unless test.tags.api it 'handle function as handler', -> expect = diff --git a/packages/core/test/session/creation.coffee b/packages/core/test/session/creation.coffee index b8ae2d210..83b511b3a 100644 --- a/packages/core/test/session/creation.coffee +++ b/packages/core/test/session/creation.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -nikita = require '../../lib' -session = require '../../lib/session' +import nikita from '@nikitajs/core' +import session from '@nikitajs/core/session' +import test from '../test.coffee' describe 'session.creation', -> - return unless tags.api + return unless test.tags.api describe 'args is array of actions', -> diff --git a/packages/core/test/session/error.coffee b/packages/core/test/session/error.coffee index 24be71dbe..71d12bc8b 100644 --- a/packages/core/test/session/error.coffee +++ b/packages/core/test/session/error.coffee @@ -1,12 +1,12 @@ -{tags} = require '../test' -nikita = require '../../lib' -session = require '../../lib/session' +import '@nikitajs/core/register' +import session from '@nikitajs/core/session' +import test from '../test.coffee' # Test the construction of the session namespace stored in state describe 'session.error', -> - return unless tags.api + return unless test.tags.api describe 'cascade', -> diff --git a/packages/core/test/session/namespace.coffee b/packages/core/test/session/namespace.coffee index 5e6dc7267..3e497f929 100644 --- a/packages/core/test/session/namespace.coffee +++ b/packages/core/test/session/namespace.coffee @@ -1,12 +1,12 @@ -{tags} = require '../test' -nikita = require '../../lib' -registry = require '../../lib/registry' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' # Test the construction of the session 
namespace stored in state describe 'session.namespace', -> - return unless tags.api + return unless test.tags.api it 'call registered action', -> nikita ({registry}) -> diff --git a/packages/core/test/session/normalize.coffee b/packages/core/test/session/normalize.coffee index 1fef8d329..67e28eb47 100644 --- a/packages/core/test/session/normalize.coffee +++ b/packages/core/test/session/normalize.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -contextualize = require '../../lib/session/contextualize' -normalize = require '../../lib/session/normalize' +import contextualize from '@nikitajs/core/session/contextualize' +import normalize from '@nikitajs/core/session/normalize' +import test from '../test.coffee' describe 'session.normalize', -> - return unless tags.api + return unless test.tags.api it 'handle function as handler', -> expect = diff --git a/packages/core/test/session/plugins/on_action.coffee b/packages/core/test/session/plugins/on_action.coffee index cdcf8cf8e..6ce7a5f52 100644 --- a/packages/core/test/session/plugins/on_action.coffee +++ b/packages/core/test/session/plugins/on_action.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.plugins.on_action', -> - return unless tags.api + return unless test.tags.api it 'call action from global registry', -> nikita.call diff --git a/packages/core/test/session/plugins/on_normalize.coffee b/packages/core/test/session/plugins/on_normalize.coffee index d14e33e35..f0ef0d2b8 100644 --- a/packages/core/test/session/plugins/on_normalize.coffee +++ b/packages/core/test/session/plugins/on_normalize.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.plugins.on_normalize', -> - return unless tags.api + return unless test.tags.api it 'call action from global registry', -> nikita.call diff --git a/packages/core/test/session/plugins/session.action.coffee b/packages/core/test/session/plugins/session.action.coffee index 4bc2d2f79..6c29d6ffe 100644 --- a/packages/core/test/session/plugins/session.action.coffee +++ b/packages/core/test/session/plugins/session.action.coffee @@ -1,12 +1,12 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -session = require '../../../lib/session' +import nikita from '@nikitajs/core' +import session from '@nikitajs/core/session' +import test from '../../test.coffee' # Test the construction of the session namespace stored in state describe 'session.plugins.session.action', -> - return unless tags.api + return unless test.tags.api describe 'runtime', -> diff --git a/packages/core/test/session/plugins/session.register.coffee b/packages/core/test/session/plugins/session.register.coffee index 0fc8f8ac9..4d037ad7d 100644 --- a/packages/core/test/session/plugins/session.register.coffee +++ b/packages/core/test/session/plugins/session.register.coffee @@ -1,12 +1,12 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -session = require '../../../lib/session' +import nikita from '@nikitajs/core' +import session from '@nikitajs/core/session' +import test from '../../test.coffee' # Test the construction of the session namespace stored in state describe 'session.plugins.session.register', -> - return unless tags.api + return unless test.tags.api it 'alter action - sync', -> nikita ({plugins, registry}) -> diff --git 
a/packages/core/test/session/plugins/session.resolved.coffee b/packages/core/test/session/plugins/session.resolved.coffee index 8d903f35d..65313041d 100644 --- a/packages/core/test/session/plugins/session.resolved.coffee +++ b/packages/core/test/session/plugins/session.resolved.coffee @@ -1,12 +1,12 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -session = require '../../../lib/session' +import nikita from '@nikitajs/core' +import session from '@nikitajs/core/session' +import test from '../../test.coffee' # Test the construction of the session namespace stored in state describe 'session.plugins.session.resolved', -> - return unless tags.api + return unless test.tags.api it 'test', -> stack = [] diff --git a/packages/core/test/session/plugins/session.result.coffee b/packages/core/test/session/plugins/session.result.coffee index a019c1e70..0a1c04a20 100644 --- a/packages/core/test/session/plugins/session.result.coffee +++ b/packages/core/test/session/plugins/session.result.coffee @@ -1,14 +1,14 @@ -{tags} = require '../../test' -nikita = require '../../../lib' -session = require '../../../lib/session' -history = require '../../../lib/plugins/history' -position = require '../../../lib/plugins/metadata/position' +import nikita from '@nikitajs/core' +import session from '@nikitajs/core/session' +import history from '@nikitajs/core/plugins/history' +import position from '@nikitajs/core/plugins/metadata/position' +import test from '../../test.coffee' # Test the construction of the session namespace stored in state describe 'session.plugins.session.result', -> - return unless tags.api + return unless test.tags.api it 'is called before action and children resolved', -> called = false diff --git a/packages/core/test/session/registry.coffee b/packages/core/test/session/registry.coffee index 768950ee7..9c9936359 100644 --- a/packages/core/test/session/registry.coffee +++ b/packages/core/test/session/registry.coffee @@ -1,10 +1,10 @@ -{tags} = require '../test' -nikita = require '../../lib' -registry = require '../../lib/registry' +import nikita from '@nikitajs/core' +import registry from '@nikitajs/core/registry' +import test from '../test.coffee' describe 'action.registry', -> - return unless tags.api + return unless test.tags.api describe 'access', -> diff --git a/packages/core/test/session/scheduler/error.coffee b/packages/core/test/session/scheduler/error.coffee index 3f75ed148..253d76c06 100644 --- a/packages/core/test/session/scheduler/error.coffee +++ b/packages/core/test/session/scheduler/error.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.scheduler.error', -> - return unless tags.api + return unless test.tags.api describe 'in last child', -> diff --git a/packages/core/test/session/scheduler/flow.coffee b/packages/core/test/session/scheduler/flow.coffee index e1716165c..9d73256c0 100644 --- a/packages/core/test/session/scheduler/flow.coffee +++ b/packages/core/test/session/scheduler/flow.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.scheduler.flow', -> - return unless tags.api + return unless test.tags.api it 'executed 1 args with 2 actions sequentially', -> stack = [] diff --git a/packages/core/test/session/scheduler/index.coffee b/packages/core/test/session/scheduler/index.coffee index d9c30b0d6..263406a52
100644 --- a/packages/core/test/session/scheduler/index.coffee +++ b/packages/core/test/session/scheduler/index.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.scheduler', -> - return unless tags.api + return unless test.tags.api describe 'arguments', -> diff --git a/packages/core/test/session/scheduler/option.strict.coffee b/packages/core/test/session/scheduler/option.strict.coffee index ac565aa1f..51d443392 100644 --- a/packages/core/test/session/scheduler/option.strict.coffee +++ b/packages/core/test/session/scheduler/option.strict.coffee @@ -1,9 +1,9 @@ -{tags} = require '../../test' -nikita = require '../../../lib' +import nikita from '@nikitajs/core' +import test from '../../test.coffee' describe 'session.scheduler.option.strict', -> - return unless tags.api + return unless test.tags.api it 'function', -> nikita diff --git a/packages/core/test/test.coffee b/packages/core/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/core/test/test.coffee +++ b/packages/core/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/core/test/utils/array.coffee b/packages/core/test/utils/array.coffee index 216f9fc0c..ec539cb47 100644 --- a/packages/core/test/utils/array.coffee +++ b/packages/core/test/utils/array.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -{intersect, flatten, merge, multiply, shuffle, unique} = require '../../lib/utils/array' +import {intersect, flatten, merge, multiply, shuffle, unique} from '@nikitajs/core/utils/array' +import test from '../test.coffee' describe 'utils.array', -> - return unless tags.api + return unless test.tags.api # it 'compare', -> # array.compare(['a'], ['a']).should.be.true() diff --git a/packages/core/test/utils/buffer.coffee b/packages/core/test/utils/buffer.coffee index 530edb56e..6bf5c31e5 100644 --- a/packages/core/test/utils/buffer.coffee +++ b/packages/core/test/utils/buffer.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -{trim} = require '../../lib/utils/buffer' +import {trim} from '@nikitajs/core/utils/buffer' +import test from '../test.coffee' describe 'utils.buffer', -> - return unless tags.api + return unless test.tags.api describe 'trim', -> diff --git a/packages/core/test/utils/error.coffee b/packages/core/test/utils/error.coffee index de5c7d22e..a2a7ff81d 100644 --- 
a/packages/core/test/utils/error.coffee +++ b/packages/core/test/utils/error.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -error = require '../../lib/utils/error' +import error from '@nikitajs/core/utils/error' +import test from '../test.coffee' describe 'utils.error', -> - return unless tags.api + return unless test.tags.api describe 'function', -> diff --git a/packages/core/test/utils/mode.coffee b/packages/core/test/utils/mode.coffee index da752d134..9c2db56a4 100644 --- a/packages/core/test/utils/mode.coffee +++ b/packages/core/test/utils/mode.coffee @@ -1,9 +1,9 @@ -mode = require '../../lib/utils/mode' -{tags} = require '../test' +import mode from '@nikitajs/core/utils/mode' +import test from '../test.coffee' describe 'utils.mode', -> - return unless tags.api + return unless test.tags.api describe 'stringify', -> diff --git a/packages/core/test/utils/object.coffee b/packages/core/test/utils/object.coffee index 062aa2ab6..eb9b85ca8 100644 --- a/packages/core/test/utils/object.coffee +++ b/packages/core/test/utils/object.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -{copy, diff, equals, filter, match, trim} = require '../../lib/utils/object' +import {copy, diff, filter, match, trim} from '@nikitajs/core/utils/object' +import test from '../test.coffee' describe 'utils.object', -> - return unless tags.api + return unless test.tags.api describe 'copy', -> diff --git a/packages/core/test/utils/os.coffee b/packages/core/test/utils/os.coffee index 9472964e5..b2cc24383 100644 --- a/packages/core/test/utils/os.coffee +++ b/packages/core/test/utils/os.coffee @@ -1,16 +1,16 @@ -{tags} = require '../test' -{merge} = require 'mixme' -{whoami} = require '../../lib/utils/os' +import {merge} from 'mixme' +import {whoami} from '@nikitajs/core/utils/os' +import test from '../test.coffee' + cleanup = (property, value) -> if value? 
process.env[property] = value else delete process.env[property] - describe 'utils.os', -> - return unless tags.api + return unless test.tags.api describe 'whoami', -> diff --git a/packages/core/test/utils/promise.coffee b/packages/core/test/utils/promise.coffee index 58bfe83f8..f14b31f37 100644 --- a/packages/core/test/utils/promise.coffee +++ b/packages/core/test/utils/promise.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -promise = require '../../lib/utils/promise' +import promise from '@nikitajs/core/utils/promise' +import test from '../test.coffee' describe 'utils.promise', -> - return unless tags.api + return unless test.tags.api describe 'array_filter', -> diff --git a/packages/core/test/utils/regexp.coffee b/packages/core/test/utils/regexp.coffee index d15ec0037..c0db4959c 100644 --- a/packages/core/test/utils/regexp.coffee +++ b/packages/core/test/utils/regexp.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -regexp = require '../../lib/utils/regexp' +import regexp from '@nikitajs/core/utils/regexp' +import test from '../test.coffee' describe 'utils.regexp', -> - return unless tags.api + return unless test.tags.api it 'is', -> regexp.is /.*/ diff --git a/packages/core/test/utils/semver.coffee b/packages/core/test/utils/semver.coffee index 9cbad15b7..2f580c4ff 100644 --- a/packages/core/test/utils/semver.coffee +++ b/packages/core/test/utils/semver.coffee @@ -1,9 +1,9 @@ -semver = require '../../lib/utils/semver' -{tags} = require '../test' +import semver from '@nikitajs/core/utils/semver' +import test from '../test.coffee' describe 'utils.semver', -> - return unless tags.api + return unless test.tags.api it 'sanitize', -> semver.sanitize('5').should.eql '5.x.x' diff --git a/packages/core/test/utils/ssh.coffee b/packages/core/test/utils/ssh.coffee index 00446fd56..cb0e564e1 100644 --- a/packages/core/test/utils/ssh.coffee +++ b/packages/core/test/utils/ssh.coffee @@ -1,13 +1,14 @@ -{merge} = require 'mixme' -nikita = require '../../lib' -utils = require '../../lib/utils' -{tags, config} = require '../test' +import {merge} from 'mixme' +import mochaThey from 'mocha-they' +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from '../test.coffee' # All test are executed with an ssh connection passed as an argument -they = require('mocha-they')(config.filter ({ssh}) -> !!ssh) +they = mochaThey(test.config.filter ({ssh}) -> !!ssh) describe 'utils.ssh', -> - return unless tags.api + return unless test.tags.api describe 'compare', -> diff --git a/packages/core/test/utils/stats.coffee b/packages/core/test/utils/stats.coffee index 7817835ce..b39236dcd 100644 --- a/packages/core/test/utils/stats.coffee +++ b/packages/core/test/utils/stats.coffee @@ -1,32 +1,32 @@ -utils = require '../../lib/utils' -{tags} = require '../test' +import stats from '@nikitajs/core/utils/stats' +import test from '../test.coffee' describe 'utils.stats', -> - return unless tags.api + return unless test.tags.api describe 'type', -> it 'directory is true', -> mode = parseInt '40755', 8 - utils.stats.isDirectory(mode).should.be.true() + stats.isDirectory(mode).should.be.true() it 'directory is false', -> mode = parseInt '100644', 8 - utils.stats.isDirectory(mode).should.be.false() + stats.isDirectory(mode).should.be.false() describe 'type', -> it 'file is true', -> mode = parseInt '100644', 8 - utils.stats.isFile(mode).should.be.true() + stats.isFile(mode).should.be.true() it 'file is false', -> mode = parseInt '40755', 8 - utils.stats.isFile(mode).should.be.false() + 
stats.isFile(mode).should.be.false() describe 'type', -> it 'file is false', -> - utils.stats.type(parseInt('40755', 8)).should.eql 'Directory' - utils.stats.type(parseInt('100644', 8)).should.eql 'File' + stats.type(parseInt('40755', 8)).should.eql 'Directory' + stats.type(parseInt('100644', 8)).should.eql 'File' diff --git a/packages/core/test/utils/string.coffee b/packages/core/test/utils/string.coffee index c52a3b79c..f82a35e98 100644 --- a/packages/core/test/utils/string.coffee +++ b/packages/core/test/utils/string.coffee @@ -1,9 +1,9 @@ -string = require '../../lib/utils/string' -{tags} = require '../test' +import string from '@nikitajs/core/utils/string' +import test from '../test.coffee' describe 'utils.string', -> - return unless tags.api + return unless test.tags.api it 'escapeshellarg', -> string.escapeshellarg("try to 'parse this").should.eql "'try to '"'"'parse this'" diff --git a/packages/core/test/utils/tilde.coffee b/packages/core/test/utils/tilde.coffee index 91386cb30..d59946d1b 100644 --- a/packages/core/test/utils/tilde.coffee +++ b/packages/core/test/utils/tilde.coffee @@ -1,9 +1,9 @@ -{tags} = require '../test' -tilde = require '../../lib/utils/tilde' +import tilde from '@nikitajs/core/utils/tilde' +import test from '../test.coffee' describe 'utils.tilde', -> - return unless tags.api + return unless test.tags.api describe 'normalize', -> diff --git a/packages/db/README.md b/packages/db/README.md index 6e0e12613..a03f00283 100644 --- a/packages/db/README.md +++ b/packages/db/README.md @@ -2,3 +2,20 @@ # Nikita "db" package The "db" package provides Nikita actions for various database operations. Currently supports PostgreSQL, MySQL and MariaDB. + +## Usage + +```js +import "@nikitajs/db/register"; +import nikita from "@nikitajs/core"; + +const { exists } = await nikita.db.database.exists({ + admin_username: "root", + admin_password: "rootme", + database: "test_database_exists_0_db", + engine: "postgresql", + host: "postgres", + port: 5432, +}); +console.info(exists); +``` diff --git a/packages/db/env/mariadb/Dockerfile b/packages/db/env/mariadb/Dockerfile index 2087fc8ae..73470d34d 100644 --- a/packages/db/env/mariadb/Dockerfile +++ b/packages/db/env/mariadb/Dockerfile @@ -1,38 +1,42 @@ -FROM centos:7.9.2009 -MAINTAINER David Worms - -RUN \ - # Install Node dependencies - yum install -y git make \ - # Install SSH and sudo - && yum install -y openssh-server openssh-clients sudo \ - && ssh-keygen -A - -## Install Mysql client -RUN yum install -y mysql - -RUN yum clean all +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +RUN apt update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo \ + # Mysql client + mysql-client && \ + # SSH configuration + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/db -# Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +# User as sudoer +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -# Install Node.js -# Note, CentOS 7.9.2009
incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 -ENV PATH /home/nikita/n/bin:$PATH +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +# Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +ENV PATH /home/nikita/n/bin:$PATH ENTRYPOINT ["/entrypoint.sh"] -CMD [] diff --git a/packages/db/env/mariadb/docker-compose.yml b/packages/db/env/mariadb/docker-compose.yml index e3d1f1ca1..72399dbf1 100644 --- a/packages/db/env/mariadb/docker-compose.yml +++ b/packages/db/env/mariadb/docker-compose.yml @@ -2,18 +2,37 @@ services: nodejs: build: . - image: nikita_db_mariadb_nodejs container_name: nikita_db_mariadb_nodejs - volumes: - - ../../../../:/nikita depends_on: - mariadb + # Not fully working for now; waiting is implemented inside entrypoint.sh + # mariadb: + # condition: service_healthy environment: NIKITA_TEST_MODULE: /nikita/packages/db/env/mariadb/test.coffee + image: nikita_db_mariadb_nodejs + networks: + - nikita + volumes: + - ../../../../:/nikita mariadb: - image: mariadb:latest container_name: nikita_db_mariadb_db environment: MARIADB_ROOT_PASSWORD: rootme expose: - "3306" + # healthcheck: + # test: "bash -c 'echo > /dev/tcp/mariadb/3306'" + # interval: 1s + # timeout: 1s + # retries: 5 + image: mariadb:latest + # logging: + # driver: none + networks: + - nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/db/env/mariadb/entrypoint.sh b/packages/db/env/mariadb/entrypoint.sh index a6d8cce0d..3509fa334 100755 --- a/packages/db/env/mariadb/entrypoint.sh +++ b/packages/db/env/mariadb/entrypoint.sh @@ -1,10 +1,22 @@ #!/bin/bash -set -e + +# Note, we had to disable the errexit option because the until condition kills the +# script despite the documentation, which states "the shell does not exit if the +# command that fails is part of the command list immediately following a while +# or until keyword" +# set -e # Source Node.js . ~/.bashrc # Start ssh daemon sudo /usr/sbin/sshd +# Wait until MariaDB is ready +i=0; until echo > /dev/tcp/mariadb/3306; do + [[ i -eq 5 ]] && >&2 echo 'MariaDB not yet started after 5s' && exit 1 + ((i++)) + sleep 1 +done +# Test execution if test -t 0; then # We have TTY, so probably an interactive container... if [[ $@ ]]; then .
~/.bashrc npx mocha $@ else + # Run bash when no argument export PS1='[\u@\h : \w]\$ ' /bin/bash fi diff --git a/packages/db/env/mariadb/run.sh b/packages/db/env/mariadb/run.sh index 3eaa8bd58..9b4173181 100755 --- a/packages/db/env/mariadb/run.sh +++ b/packages/db/env/mariadb/run.sh @@ -1,4 +1,6 @@ #!/usr/bin/env bash cd `pwd`/`dirname ${BASH_SOURCE}` -docker compose up --abort-on-container-exit +# Use `--attach` to restrict attaching to the specified services, +# disabling logging for other services +docker compose up --abort-on-container-exit --attach nodejs diff --git a/packages/db/env/mariadb/test.coffee b/packages/db/env/mariadb/test.coffee index 0d2d547a2..ab565ee76 100644 --- a/packages/db/env/mariadb/test.coffee +++ b/packages/db/env/mariadb/test.coffee @@ -1,10 +1,9 @@ -module.exports = +export default tags: db: true db: mariadb: - admin_db: 'root' admin_password: 'rootme' admin_username: 'root' engine: 'mariadb' diff --git a/packages/db/env/mysql/Dockerfile b/packages/db/env/mysql/Dockerfile index cd664c8ca..73470d34d 100644 --- a/packages/db/env/mysql/Dockerfile +++ b/packages/db/env/mysql/Dockerfile @@ -1,37 +1,42 @@ -FROM centos:7.9.2009 -MAINTAINER David Worms - -RUN \ - # Install Node dependencies - yum install -y git make \ - # Install SSH and sudo - && yum install -y openssh-server openssh-clients sudo \ - && ssh-keygen -A - -## Install Mysql client -RUN yum install -y mysql - -RUN yum clean all +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +RUN apt update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo \ + # Mysql client + mysql-client && \ + # SSH configuration + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/db -# Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +# User as sudoer +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -# Install Node.js -# Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.20.1 -ENV PATH /home/nikita/n/bin:$PATH +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N ''\ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +# Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +ENV PATH /home/nikita/n/bin:$PATH ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/db/env/mysql/docker-compose.yml b/packages/db/env/mysql/docker-compose.yml index 523243f91..a3059b169 100644 --- a/packages/db/env/mysql/docker-compose.yml +++ b/packages/db/env/mysql/docker-compose.yml @@ -2,20 +2,39 @@ services: nodejs: build: . 
- image: nikita_db_mysql_nodejs container_name: nikita_db_mysql_nodejs - volumes: - - ../../../../:/nikita depends_on: - mysql + # Not fully working for now; waiting is implemented inside entrypoint.sh + # mysql: + # condition: service_healthy environment: NIKITA_TEST_MODULE: /nikita/packages/db/env/mysql/test.coffee + image: nikita_db_mysql_nodejs + networks: + - nikita + volumes: + - ../../../../:/nikita mysql: - image: mysql:latest container_name: nikita_db_mysql_db # To avoid an error in the latest versions: ERROR 2059 (HY000): Authentication plugin 'caching_sha2_password' cannot be loaded - command: --default-authentication-plugin=mysql_native_password + command: --default-authentication-plugin=caching_sha2_password environment: MYSQL_ROOT_PASSWORD: rootme expose: - "3306" + # healthcheck: + # test: "bash -c 'echo > /dev/tcp/mysql/3306'" + # interval: 1s + # timeout: 1s + # retries: 20 + image: mysql:latest + # logging: + # driver: none + networks: + - nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/db/env/mysql/entrypoint.sh b/packages/db/env/mysql/entrypoint.sh index 43fbf9c83..018af69a6 100755 --- a/packages/db/env/mysql/entrypoint.sh +++ b/packages/db/env/mysql/entrypoint.sh @@ -1,17 +1,29 @@ #!/bin/bash -set -e + +# Note, we had to disable the errexit option because the until condition kills the +# script despite the documentation, which states "the shell does not exit if the +# command that fails is part of the command list immediately following a while +# or until keyword" +# set -e # Source Node.js . ~/.bashrc # Start ssh daemon sudo /usr/sbin/sshd -# We have TTY, so probably an interactive container... +# Wait until MySQL is ready +i=0; until echo > /dev/tcp/mysql/3306; do + [[ i -eq 10 ]] && >&2 echo 'MySQL not yet started after 10s' && exit 1 + ((i++)) + sleep 1 +done +# Test execution if test -t 0; then - # Some command(s) has been passed to container? Execute them and exit. - # No commands provided? Run bash. + # We have TTY, so probably an interactive container...
if [[ $@ ]]; then + # Transfer arguments to mocha node_modules/.bin/mocha $@ else + # Run bash when no argument export PS1='[\u@\h : \w]\$ ' /bin/bash fi diff --git a/packages/db/env/mysql/run.sh b/packages/db/env/mysql/run.sh index 3eaa8bd58..9b4173181 100755 --- a/packages/db/env/mysql/run.sh +++ b/packages/db/env/mysql/run.sh @@ -1,4 +1,6 @@ #!/usr/bin/env bash cd `pwd`/`dirname ${BASH_SOURCE}` -docker compose up --abort-on-container-exit +# Use `--attach` to restrict attaching to the specified services, +# disabling logging for other services +docker compose up --abort-on-container-exit --attach nodejs diff --git a/packages/db/env/mysql/test.coffee b/packages/db/env/mysql/test.coffee index 32dd0e5f3..3bb0db154 100644 --- a/packages/db/env/mysql/test.coffee +++ b/packages/db/env/mysql/test.coffee @@ -1,10 +1,9 @@ -module.exports = +export default tags: db: true db: mysql: - admin_db: 'root' admin_password: 'rootme' admin_username: 'root' engine: 'mysql' diff --git a/packages/db/env/postgresql/Dockerfile b/packages/db/env/postgresql/Dockerfile index cdef9f194..0b3a97adb 100644 --- a/packages/db/env/postgresql/Dockerfile +++ b/packages/db/env/postgresql/Dockerfile @@ -1,37 +1,42 @@ -FROM centos:7.9.2009 -MAINTAINER David Worms - -RUN \ - # Install Node dependencies - yum install -y git make \ - # Install SSH and sudo - && yum install -y openssh-server openssh-clients sudo \ - && ssh-keygen -A - -# Install PostgreSQL client -RUN yum install -y postgresql - -RUN yum clean all +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +RUN apt update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo \ + # PostgreSQL client + postgresql-client && \ + # SSH configuration + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/db -# Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +# User as sudoer +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -# Install Node.js -# Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 -ENV PATH /home/nikita/n/bin:$PATH +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +# Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +ENV PATH /home/nikita/n/bin:$PATH ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/db/env/postgresql/docker-compose.yml b/packages/db/env/postgresql/docker-compose.yml index e58e8f27e..b7ac0ed6d 100644 --- a/packages/db/env/postgresql/docker-compose.yml +++ b/packages/db/env/postgresql/docker-compose.yml @@ -2,19 +2,40 @@ services: nodejs: build: .
- image: nikita_db_postgresql_nodejs container_name: nikita_db_postgresql_nodejs - volumes: - - ../../../../:/nikita depends_on: - postgres + # Not fully working for now; waiting is implemented inside entrypoint.sh + # dind: + # condition: service_healthy + # postgres: + # condition: service_healthy environment: NIKITA_TEST_MODULE: /nikita/packages/db/env/postgresql/test.coffee + image: nikita_db_postgresql_nodejs + networks: + - nikita + volumes: + - ../../../../:/nikita postgres: - image: postgres:9.5 + image: postgres:16 container_name: nikita_db_postgresql_db environment: POSTGRES_USER: root POSTGRES_PASSWORD: rootme expose: - "5432" + # healthcheck: + # test: "bash -c 'echo > /dev/tcp/postgres/5432'" + # interval: 1s + # timeout: 1s + # retries: 5 logging: driver: none networks: + - nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/db/env/postgresql/entrypoint.sh b/packages/db/env/postgresql/entrypoint.sh index 43fbf9c83..1573fca2a 100755 --- a/packages/db/env/postgresql/entrypoint.sh +++ b/packages/db/env/postgresql/entrypoint.sh @@ -1,17 +1,28 @@ #!/bin/bash -set -e + +# Note, we had to disable the errexit option because the until condition kills the +# script despite the documentation, which states "the shell does not exit if the +# command that fails is part of the command list immediately following a while +# or until keyword" +# set -e # Source Node.js . ~/.bashrc # Start ssh daemon sudo /usr/sbin/sshd -# We have TTY, so probably an interactive container... +# Wait until PostgreSQL is ready +i=0; until echo > /dev/tcp/postgres/5432; do + [[ i -eq 5 ]] && >&2 echo 'PostgreSQL not yet started after 5s' && exit 1 + ((i++)) + sleep 1 +done +# Test execution if test -t 0; then - # Some command(s) has been passed to container? Execute them and exit. - # No commands provided? Run bash. + # We have TTY, so probably an interactive container...
if [[ $@ ]]; then node_modules/.bin/mocha $@ else + # Run bash when no argument export PS1='[\u@\h : \w]\$ ' /bin/bash fi diff --git a/packages/db/env/postgresql/run.sh b/packages/db/env/postgresql/run.sh index 3eaa8bd58..9b4173181 100755 --- a/packages/db/env/postgresql/run.sh +++ b/packages/db/env/postgresql/run.sh @@ -1,4 +1,6 @@ #!/usr/bin/env bash cd `pwd`/`dirname ${BASH_SOURCE}` -docker compose up --abort-on-container-exit +# Use `--attach` to restrict attaching to the specified services, +# disabling logging for other services +docker compose up --abort-on-container-exit --attach nodejs diff --git a/packages/db/env/postgresql/test.coffee b/packages/db/env/postgresql/test.coffee index 7a839ed57..2bc2afb3d 100644 --- a/packages/db/env/postgresql/test.coffee +++ b/packages/db/env/postgresql/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: db: true db: @@ -9,7 +9,6 @@ module.exports = engine: 'postgresql' host: 'postgres' port: 5432 - admin_db: 'root' config: [ label: 'local' , diff --git a/packages/db/lib/database/exists/README.md b/packages/db/lib/database/exists/README.md index bd5cfb2a9..25a25bc27 100644 --- a/packages/db/lib/database/exists/README.md +++ b/packages/db/lib/database/exists/README.md @@ -14,6 +14,6 @@ const {exists} = nikita.db.database.exists({ engine: 'postgresql', host: 'postgres', port: 5432, - admin_db: 'root', + database: 'root', }) ``` diff --git a/packages/db/lib/database/exists/index.js b/packages/db/lib/database/exists/index.js index 3b52c015c..ab865d995 100644 --- a/packages/db/lib/database/exists/index.js +++ b/packages/db/lib/database/exists/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const cmd_list_tables = config.engine === 'postgresql' ? 
`SELECT datname FROM pg_database WHERE datname = '${config.database}';` diff --git a/packages/db/lib/database/exists/schema.json b/packages/db/lib/database/exists/schema.json index 08d425bff..5ac3f171e 100644 --- a/packages/db/lib/database/exists/schema.json +++ b/packages/db/lib/database/exists/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/database/index.js b/packages/db/lib/database/index.js index dd5e6e9b4..96ab5078d 100644 --- a/packages/db/lib/database/index.js +++ b/packages/db/lib/database/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require("dedent"); -const definitions = require("./schema.json"); -const utils = require("../utils"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/db/utils"; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (config.user == null) { config.user = []; diff --git a/packages/db/lib/database/remove/index.js b/packages/db/lib/database/remove/index.js index c21860ca0..00b5ce0d2 100644 --- a/packages/db/lib/database/remove/index.js +++ b/packages/db/lib/database/remove/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Avoid errors when database argument is provided in the command: // - Postgres: "ERROR: cannot drop the currently open database" diff --git a/packages/db/lib/database/remove/schema.json b/packages/db/lib/database/remove/schema.json index 9d8802029..cf5e2deca 100644 --- a/packages/db/lib/database/remove/schema.json +++ b/packages/db/lib/database/remove/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/database/schema.json b/packages/db/lib/database/schema.json index a68f5575c..92d883eb0 100644 --- a/packages/db/lib/database/schema.json +++ b/packages/db/lib/database/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/database/wait/index.js b/packages/db/lib/database/wait/index.js index 9d562ade4..f6ddfecb2 100644 --- a/packages/db/lib/database/wait/index.js +++ b/packages/db/lib/database/wait/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const cmd_list_tables = config.engine === 'postgresql' ? 
`SELECT datname FROM pg_database WHERE datname = '${config.database}';` diff --git a/packages/db/lib/database/wait/schema.json b/packages/db/lib/database/wait/schema.json index 27dea5551..4c851f4f5 100644 --- a/packages/db/lib/database/wait/schema.json +++ b/packages/db/lib/database/wait/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/query/index.js b/packages/db/lib/query/index.js index fba85c54e..ff0142718 100644 --- a/packages/db/lib/query/index.js +++ b/packages/db/lib/query/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require("../utils"); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/db/utils"; // Action -module.exports = { +export default { handler: async function ({ config }) { const { $status, stdout } = await this.execute({ command: utils.db.command(config), diff --git a/packages/db/lib/query/schema.json b/packages/db/lib/query/schema.json index dc675812f..401073906 100644 --- a/packages/db/lib/query/schema.json +++ b/packages/db/lib/query/schema.json @@ -46,7 +46,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/register.js b/packages/db/lib/register.js index 09488ebb2..46f4b864f 100644 --- a/packages/db/lib/register.js +++ b/packages/db/lib/register.js @@ -1,36 +1,29 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { db: { database: { - '': '@nikitajs/db/lib/database', - exists: '@nikitajs/db/lib/database/exists', - remove: '@nikitajs/db/lib/database/remove', - wait: '@nikitajs/db/lib/database/wait' + '': '@nikitajs/db/database', + exists: '@nikitajs/db/database/exists', + remove: '@nikitajs/db/database/remove', + wait: '@nikitajs/db/database/wait' }, - query: '@nikitajs/db/lib/query', + query: '@nikitajs/db/query', schema: { - '': '@nikitajs/db/lib/schema', - exists: '@nikitajs/db/lib/schema/exists', - list: '@nikitajs/db/lib/schema/list', - remove: '@nikitajs/db/lib/schema/remove' + '': '@nikitajs/db/schema', + exists: '@nikitajs/db/schema/exists', + list: '@nikitajs/db/schema/list', + remove: '@nikitajs/db/schema/remove' }, user: { - '': '@nikitajs/db/lib/user', - exists: '@nikitajs/db/lib/user/exists', - remove: '@nikitajs/db/lib/user/remove' + '': '@nikitajs/db/user', + exists: '@nikitajs/db/user/exists', + remove: '@nikitajs/db/user/remove' } } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/db/lib/schema/exists/index.js b/packages/db/lib/schema/exists/index.js index e60274a44..8df56c01f 100644 --- a/packages/db/lib/schema/exists/index.js +++ b/packages/db/lib/schema/exists/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.db.query(config, { command: `SELECT 1 FROM pg_namespace WHERE nspname = '${config.schema}';`, diff 
--git a/packages/db/lib/schema/exists/schema.json b/packages/db/lib/schema/exists/schema.json index a14a6895e..4a142c40e 100644 --- a/packages/db/lib/schema/exists/schema.json +++ b/packages/db/lib/schema/exists/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/schema/index.js b/packages/db/lib/schema/index.js index fc044d9e9..4a7a01db4 100644 --- a/packages/db/lib/schema/index.js +++ b/packages/db/lib/schema/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require("../utils"); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/db/utils"; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.execute({ $shy: true, diff --git a/packages/db/lib/schema/list/index.js b/packages/db/lib/schema/list/index.js index f8153b2e6..f49ebf2cd 100644 --- a/packages/db/lib/schema/list/index.js +++ b/packages/db/lib/schema/list/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { const {stdout} = await this.db.query(config, { command: '\\dn', diff --git a/packages/db/lib/schema/list/schema.json b/packages/db/lib/schema/list/schema.json index e31908154..584732705 100644 --- a/packages/db/lib/schema/list/schema.json +++ b/packages/db/lib/schema/list/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/schema/remove/index.js b/packages/db/lib/schema/remove/index.js index 7979354a8..32cc7483f 100644 --- a/packages/db/lib/schema/remove/index.js +++ b/packages/db/lib/schema/remove/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {exists} = await this.db.schema.exists(config); if (!exists) { diff --git a/packages/db/lib/schema/remove/schema.json b/packages/db/lib/schema/remove/schema.json index 6f4ddad14..14478967a 100644 --- a/packages/db/lib/schema/remove/schema.json +++ b/packages/db/lib/schema/remove/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/schema/schema.json b/packages/db/lib/schema/schema.json index a14a6895e..4a142c40e 100644 --- a/packages/db/lib/schema/schema.json +++ b/packages/db/lib/schema/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/user/exists/index.js b/packages/db/lib/user/exists/index.js index 902390595..2aa3559dc 100644 --- a/packages/db/lib/user/exists/index.js +++ b/packages/db/lib/user/exists/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = 
require('./schema.json'); -const {db} = require('../../utils'); +import definitions from "./schema.json" assert { type: "json" }; +import { db } from "@nikitajs/db/utils"; // Action -module.exports = { +export default { handler: async function({config}) { const {stdout} = await this.db.query(db.connection_config(config), { database: undefined, diff --git a/packages/db/lib/user/exists/schema.json b/packages/db/lib/user/exists/schema.json index 5c1921153..f4338b405 100644 --- a/packages/db/lib/user/exists/schema.json +++ b/packages/db/lib/user/exists/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/user/index.js b/packages/db/lib/user/index.js index 0d02a2ab1..6aea492a4 100644 --- a/packages/db/lib/user/index.js +++ b/packages/db/lib/user/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); -const utils = require("../utils"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/db/utils"; // Action -module.exports = { +export default { handler: async function ({ config }) { // Commands const engine = config.engine === 'mysql' || config.engine === 'mariadb' @@ -45,21 +45,24 @@ module.exports = { }, "\\dt" ) + - " 2>&1 >/dev/null | grep -e '^psql:\\sFATAL.*password\\sauthentication\\sfailed\\sfor\\suser.*'"; - const command_password_change = engine === "mysql" - ? utils.db.command( - config, - `SET PASSWORD FOR ${config.username} = PASSWORD ('${config.password}');` - ) - : engine === "mariadb" - ? utils.db.command( - config, - `ALTER USER ${config.username} IDENTIFIED BY '${config.password}';` - ) - : utils.db.command( - config, - `ALTER USER ${config.username} WITH PASSWORD '${config.password}';` - ) + " 2>&1 >/dev/null | grep -e '^.*\\sFATAL.*password\\sauthentication\\sfailed\\sfor\\suser.*'"; + const command_password_change = + engine === "mysql" + ? utils.db.command( + config, + // Old mysql version for MySQL 5.7.5 and earlier or MariaDB 10.1.20 and earlier + // `SET PASSWORD FOR ${config.username} = PASSWORD ('${config.password}');` + `ALTER USER ${config.username} IDENTIFIED BY '${config.password}';` + ) + : engine === "mariadb" + ? 
utils.db.command( + config, + `ALTER USER ${config.username} IDENTIFIED BY '${config.password}';` + ) + : utils.db.command( + config, + `ALTER USER ${config.username} WITH PASSWORD '${config.password}';` + ); return await this.execute({ command: dedent` signal=3 diff --git a/packages/db/lib/user/remove/index.js b/packages/db/lib/user/remove/index.js index 143b819e6..a14dc9013 100644 --- a/packages/db/lib/user/remove/index.js +++ b/packages/db/lib/user/remove/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.db.query(config, { command: `DROP USER IF EXISTS ${config.username};` diff --git a/packages/db/lib/user/remove/schema.json b/packages/db/lib/user/remove/schema.json index 3a17ac3f1..5cd3a449a 100644 --- a/packages/db/lib/user/remove/schema.json +++ b/packages/db/lib/user/remove/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/user/schema.json b/packages/db/lib/user/schema.json index af1ff74a8..0db34c946 100644 --- a/packages/db/lib/user/schema.json +++ b/packages/db/lib/user/schema.json @@ -3,7 +3,7 @@ "type": "object", "allOf": [ { - "$ref": "module://@nikitajs/db/lib/query#/definitions/db" + "$ref": "module://@nikitajs/db/query#/definitions/db" } ], "properties": { diff --git a/packages/db/lib/utils/db.js b/packages/db/lib/utils/db.js new file mode 100644 index 000000000..4605fdc96 --- /dev/null +++ b/packages/db/lib/utils/db.js @@ -0,0 +1,167 @@ +import utils from "@nikitajs/core/utils"; + +// Escape SQL for Bash processing. +const escape = function (sql) { + return sql.replace(/[\\"]/g, "\\$&"); +}; + +// Build the CLI query command. +const command = function (...opts) { + const config = {}; + for (let opt of opts) { + if (typeof opt === "string") { + opt = { + command: opt, + }; + } + for (const k in opt) { + config[k] = opt[k]; + } + } + if (!config.admin_username) { + throw utils.error("NIKITA_DB_UTILS_REQUIRED_ARGUMENTS", [ + 'Missing required argument: "admin_username"', + ]); + } + if (!config.admin_password) { + throw utils.error("NIKITA_DB_UTILS_REQUIRED_ARGUMENTS", [ + 'Missing required argument: "admin_password"', + ]); + } + if (!config.host) { + throw utils.error("NIKITA_DB_UTILS_REQUIRED_ARGUMENTS", [ + 'Missing required argument: "host"', + ]); + } + switch (config.engine) { + case "mariadb": + case "mysql": + if (config.path == null) { + config.path = "mysql"; + } + if (config.port == null) { + config.port = "3306"; + } + return [ + `${config.path}`, + `-h${config.host}`, + `-P${config.port}`, + `-u${config.admin_username}`, + `-p'${config.admin_password}'`, + config.database ? `-D${config.database}` : void 0, + config.mysql_config ? `${config.mysql_config}` : void 0, + // -N, --skip-column-names Don't write column names in results. + // -s, --silent Be more silent. Print results with a tab as separator, each row on new line. + // -r, --raw Write fields without conversion. Used with --batch. + config.silent ? "-N -s -r" : void 0, + config.command ? 
`-e \"${escape(config.command)}\"` : void 0, + ] + .filter(Boolean) + .join(" "); + case "postgresql": + if (config.path == null) { + config.path = "psql"; + } + if (config.port == null) { + config.port = "5432"; + } + return [ + `PGPASSWORD=${config.admin_password}`, + `${config.path}`, + `-h ${config.host}`, + `-p ${config.port}`, + `-U ${config.admin_username}`, + config.database ? `-d ${config.database}` : void 0, + config.postgres_config ? `${config.postgres_config}` : void 0, + // -t, --tuples-only Print rows only + // -A, --no-align Unaligned table output mode + // -q, --quiet Run quietly (no messages, only query output) + "-tAq", + config.command ? `-c \"${config.command}\"` : void 0, + ] + .filter(Boolean) + .join(" "); + default: + throw Error(`Unsupported engine: ${JSON.stringify(config.engine)}`); + } +}; + +/* +Parse JDBC URL + +Enrich the result of `url.parse` with the "engine" and "db" properties. + +Example: + +``` +parse 'jdbc:mysql://host1:3306,host2:3306/hive?createDatabaseIfNotExist=true' +{ engine: 'mysql', + addresses: + [ { host: 'host1', port: '3306' }, + { host: 'host2', port: '3306' } ], + database: 'hive' } +``` +*/ +const jdbc = function (jdbc) { + if (/^jdbc:mysql:/.test(jdbc)) { + let [_, __, addresses, database] = + /^jdbc:(.*?):\/+(.*?)\/(.*?)(\?(.*)|$)/.exec(jdbc); + return { + engine: "mysql", + addresses: addresses.split(",").map(function (address) { + const [host, port] = address.split(":"); + return { + host: host, + port: port || 3306, + }; + }), + database: database, + }; + } else if (/^jdbc:postgresql:/.test(jdbc)) { + let [_, __, addresses, database] = + /^jdbc:(.*?):\/+(.*?)\/(.*?)(\?(.*)|$)/.exec(jdbc); + return { + engine: "postgresql", + addresses: addresses.split(",").map(function (address) { + const [host, port] = address.split(":"); + return { + host: host, + port: port || 5432, + }; + }), + database: database, + }; + } else { + throw Error("Invalid JDBC URL"); + } +}; + +// Filter connection properties +const connection_config = function (opts) { + const config = {}; + for (const k in opts) { + const v = opts[k]; + if ( + k !== "admin_username" && + k !== "admin_password" && + k !== "database" && + k !== "engine" && + k !== "host" && + k !== "port" && + k !== "silent" + ) { + continue; + } + config[k] = v; + } + return config; +}; + +export { escape, command, jdbc, connection_config }; + +export default { + escape: escape, + command: command, + jdbc: jdbc, + connection_config: connection_config, +}; diff --git a/packages/db/lib/utils/index.js b/packages/db/lib/utils/index.js index c5c64f4fd..a8969bc28 100644 --- a/packages/db/lib/utils/index.js +++ b/packages/db/lib/utils/index.js @@ -1,151 +1,9 @@ +import utils from "@nikitajs/core/utils"; +import db from "@nikitajs/db/utils/db"; -const utils = require('@nikitajs/core/lib/utils'); +export { db }; -module.exports = { +export default { ...utils, - db: { - - // Escape SQL for Bash processing. - escape: function(sql) { - return sql.replace(/[\\"]/g, "\\$&"); - }, - // Build the CLI query command. 
- command: function(...opts) { - var config, i, k, len, opt, v; - config = {}; - for (i = 0, len = opts.length; i < len; i++) { - opt = opts[i]; - if (typeof opt === 'string') { - opt = { - command: opt - }; - } - for (k in opt) { - v = opt[k]; - config[k] = v; - } - } - if (!config.admin_username) { - throw utils.error('NIKITA_DB_UTILS_REQUIRED_ARGUMENTS', ['Missing required argument: "admin_username"']); - } - if (!config.admin_password) { - throw utils.error('NIKITA_DB_UTILS_REQUIRED_ARGUMENTS', ['Missing required argument: "admin_password"']); - } - if (!config.host) { - throw utils.error('NIKITA_DB_UTILS_REQUIRED_ARGUMENTS', ['Missing required argument: "host"']); - } - switch (config.engine) { - case 'mariadb': - case 'mysql': - if (config.path == null) { - config.path = 'mysql'; - } - if (config.port == null) { - config.port = '3306'; - } - return [ - `${config.path}`, - `-h${config.host}`, - `-P${config.port}`, - `-u${config.admin_username}`, - `-p'${config.admin_password}'`, - config.database ? `-D${config.database}` : void 0, - config.mysql_config ? `${config.mysql_config}` : void 0, - // -N, --skip-column-names Don't write column names in results. - // -s, --silent Be more silent. Print results with a tab as separator, each row on new line. - // -r, --raw Write fields without conversion. Used with --batch. - config.silent ? "-N -s -r" : void 0, - config.command ? `-e \"${module.exports.db.escape(config.command)}\"` : void 0 - ].filter(Boolean).join(' '); - case 'postgresql': - if (config.path == null) { - config.path = 'psql'; - } - if (config.port == null) { - config.port = '5432'; - } - return [ - `PGPASSWORD=${config.admin_password}`, - `${config.path}`, - `-h ${config.host}`, - `-p ${config.port}`, - `-U ${config.admin_username}`, - config.database ? `-d ${config.database}` : void 0, - config.postgres_config ? `${config.postgres_config}` : void 0, - // -t, --tuples-only Print rows only - // -A, --no-align Unaligned table output mode - // -q, --quiet Run quietly (no messages, only query output) - "-tAq", - config.command ? `-c \"${config.command}\"` : void 0 - ].filter(Boolean).join(' '); - default: - throw Error(`Unsupported engine: ${JSON.stringify(config.engine)}`); - } - }, - /* - Parse JDBC URL - - Enrich the result of `url.parse` with the "engine" and "db" properties. 
- - Example: - - ``` - parse 'jdbc:mysql://host1:3306,host2:3306/hive?createDatabaseIfNotExist=true' - { engine: 'mysql', - addresses: - [ { host: 'host1', port: '3306' }, - { host: 'host2', port: '3306' } ], - database: 'hive' } - ``` - */ - jdbc: function(jdbc) { - var _, addresses, database, engine; - if (/^jdbc:mysql:/.test(jdbc)) { - [_, engine, addresses, database] = /^jdbc:(.*?):\/+(.*?)\/(.*?)(\?(.*)|$)/.exec(jdbc); - addresses = addresses.split(',').map(function(address) { - var host, port; - [host, port] = address.split(':'); - return { - host: host, - port: port || 3306 - }; - }); - return { - engine: 'mysql', - addresses: addresses, - database: database - }; - } else if (/^jdbc:postgresql:/.test(jdbc)) { - [_, engine, addresses, database] = /^jdbc:(.*?):\/+(.*?)\/(.*?)(\?(.*)|$)/.exec(jdbc); - addresses = addresses.split(',').map(function(address) { - var host, port; - [host, port] = address.split(':'); - return { - host: host, - port: port || 5432 - }; - }); - return { - engine: 'postgresql', - addresses: addresses, - database: database - }; - } else { - throw Error('Invalid JDBC URL'); - } - }, - //# Filter connection properties - connection_config: function(opts) { - var config, k, v; - config = {}; - for (k in opts) { - v = opts[k]; - if (k !== 'admin_username' && k !== 'admin_password' && k !== 'database' && k !== 'engine' && k !== 'host' && k !== 'port' && k !== 'silent') { - continue; - } - config[k] = v; - } - return config; - } - } + db: db, }; diff --git a/packages/db/package.json b/packages/db/package.json index 361cd21d3..38c8ee674 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/db", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various database operations. 
Currently supports PostgreSQL, MySQL and MariaDB.", "keywords": [ "nikita", @@ -17,7 +18,6 @@ "system", "task" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -57,20 +57,26 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/db/lib/register" - ], "inline-diffs": true, - "timeout": 400000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/db/register", + "should" + ], + "throw-deprecation": true, + "timeout": 50000 }, "publishConfig": { "access": "public" @@ -89,5 +95,6 @@ }, "dependencies": { "dedent": "^1.2.0" - } + }, + "type": "module" } diff --git a/packages/db/test.sample.coffee b/packages/db/test.sample.coffee index 21584a64d..afa4cade5 100644 --- a/packages/db/test.sample.coffee +++ b/packages/db/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: db: false # disable_db docker: # eg `docker-machine create --driver virtualbox nikita` @@ -14,5 +14,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/db/test/database/exists.coffee b/packages/db/test/database/exists.coffee index 1f2454fd6..3fef47223 100644 --- a/packages/db/test/database/exists.coffee +++ b/packages/db/test/database/exists.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.db - -for engine, _ of db +for engine, _ of test.db describe "db.database.exists #{engine}", -> + return unless test.tags.db they 'database missing', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , ({tools: {status}}) -> {exists} = await @db.database.exists database: 'test_database_exists_0_db' exists.should.be.false() @@ -23,13 +23,13 @@ for engine, _ of db they 'database exists', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , ({tools: {status}}) -> - @db.database.remove 'test_database_exists_1_db', $shy: true - @db.database 'test_database_exists_1_db', $shy: true + await @db.database.remove 'test_database_exists_1_db', $shy: true + await @db.database 'test_database_exists_1_db', $shy: true {exists} = await @db.database.exists database: 'test_database_exists_1_db' exists.should.be.true() {exists} = await @db.database.exists 'test_database_exists_1_db' exists.should.be.true() - @db.database.remove 'test_database_exists_1_db', $shy: true + await @db.database.remove 'test_database_exists_1_db', $shy: true status().should.be.false() diff --git a/packages/db/test/database/index.coffee b/packages/db/test/database/index.coffee index 2b6316f97..7ecb62b2d 100644 --- a/packages/db/test/database/index.coffee +++ b/packages/db/test/database/index.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) -utils = 
require '../../lib/utils' +import nikita from '@nikitajs/core' +import utils from '@nikitajs/db/utils' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.db - -for engine, _ of db then do (engine) -> +for engine, _ of test.db then do (engine) -> describe "db.database #{engine}", -> + return unless test.tags.db they 'database as an argument', ({ssh}) -> {exists} = await nikita $ssh: ssh - db: db[engine] + db: test.db[engine] .db.database.remove 'db_create_0' .db.database 'db_create_0' .db.database.exists 'db_create_0' @@ -22,50 +22,50 @@ for engine, _ of db then do (engine) -> they 'output `$status`', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.database.remove 'db_create_1' + await @db.database.remove 'db_create_1' {$status} = await @db.database 'db_create_1' $status.should.be.true() {$status} = await @db.database 'db_create_1' $status.should.be.false() - @db.database.remove 'db_create_1' + await @db.database.remove 'db_create_1' describe 'user', -> they 'which is existing', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.database.remove 'db_create_3' - @db.user.remove 'db_create_user_3' - @db.user + await @db.database.remove 'db_create_3' + await @db.user.remove 'db_create_user_3' + await @db.user username: 'db_create_user_3' password: 'db_create_user_3' - @db.database + await @db.database database: 'db_create_3' user: 'db_create_user_3' # Todo: why not using nikita.user.exists ? {$status: user_exists} = await @execute command: switch engine - when 'mariadb', 'mysql' then utils.db.command(db[engine], database: 'mysql', "SELECT user FROM db WHERE db='db_create_3';") + " | grep 'db_create_user_3'" - when 'postgresql' then utils.db.command(db[engine], database: 'db_create_3', '\\l') + " | egrep '^db_create_user_3='" + when 'mariadb', 'mysql' then utils.db.command(test.db[engine], database: 'mysql', "SELECT user FROM db WHERE db='db_create_3';") + " | grep 'db_create_user_3'" + when 'postgresql' then utils.db.command(test.db[engine], database: 'db_create_3', '\\l') + " | egrep '^db_create_user_3='" user_exists.should.be.true() - @db.database.remove 'db_create_3' - @db.user.remove 'db_create_user_3' + await @db.database.remove 'db_create_3' + await @db.user.remove 'db_create_user_3' they 'output `$status`', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.database.remove 'db_create_3' - @db.user.remove 'db_create_user_3' - @db.user + await @db.database.remove 'db_create_3' + await @db.user.remove 'db_create_user_3' + await @db.user username: 'db_create_user_3' password: 'db_create_user_3' - @db.database + await @db.database database: 'db_create_3' {$status} = await @db.database database: 'db_create_3' @@ -75,23 +75,23 @@ for engine, _ of db then do (engine) -> database: 'db_create_3' user: 'db_create_user_3' $status.should.be.false() - @db.database.remove 'db_create_3' - @db.user.remove 'db_create_user_3' + await @db.database.remove 'db_create_3' + await @db.user.remove 'db_create_user_3' they 'which is not existing', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> try - @db.database.remove 'db_create_4' - @db.user.remove 'db_create_user_4' + await @db.database.remove 'db_create_4' + await @db.user.remove 'db_create_user_4' await @db.database database: 'db_create_4' user: 'db_create_user_4' throw Error 'Oh no' catch err - err.message.should.eql 'DB user does not exists: db_create_user_4' + 
err.message.should.eql 'DB user does not exists: db_create_user_4' finally - @db.database.remove 'db_create_4' - @db.user.remove 'db_create_user_4' + await @db.database.remove 'db_create_4' + await @db.user.remove 'db_create_user_4' diff --git a/packages/db/test/database/wait.coffee b/packages/db/test/database/wait.coffee index bd98ca5d4..b6371f490 100644 --- a/packages/db/test/database/wait.coffee +++ b/packages/db/test/database/wait.coffee @@ -1,37 +1,37 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.db - -for engine, _ of db +for engine, _ of test.db describe "db.database.wait #{engine}", -> + return unless test.tags.db they 'is already created', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.database.remove 'db_wait_1' - @db.database 'db_wait_0' + await @db.database.remove 'db_wait_1' + await @db.database 'db_wait_0' {$status} = await @db.database.wait 'db_wait_0' $status.should.be.false() - @db.database.remove 'db_wait_0' + await @db.database.remove 'db_wait_0' they 'is not yet created', ({ssh}) -> setTimeout -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] .db.database 'db_wait_1' , 200 nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.database.remove 'db_wait_1' + await @db.database.remove 'db_wait_1' {$status} = await @db.database.wait 'db_wait_1' $status.should.be.true() - @db.database.remove 'db_wait_1' + await @db.database.remove 'db_wait_1' diff --git a/packages/db/test/query.coffee b/packages/db/test/query.coffee index 7aa3cb866..6db2c44d9 100644 --- a/packages/db/test/query.coffee +++ b/packages/db/test/query.coffee @@ -1,110 +1,108 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require './test' -they = require('mocha-they')(config) - -return unless tags.db - -describe "db.query", -> +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) - for engine, _ of db then do (engine) -> +for engine, _ of test.db then do (engine) -> - describe "#{engine}", -> + describe "db.query #{engine}", -> + return unless test.tags.db - they 'schema required', ({ssh}) -> - nikita - $ssh: ssh - .db.query - command: 'select * from doesntmatter' - .should.be.rejectedWith [ - "NIKITA_SCHEMA_VALIDATION_CONFIG:" - "multiple errors were found in the configuration of action `db.query`:" - "module://@nikitajs/db/lib/query#/definitions/db/required config must have required property 'admin_password';" - "module://@nikitajs/db/lib/query#/definitions/db/required config must have required property 'admin_username';" - "module://@nikitajs/db/lib/query#/definitions/db/required config must have required property 'engine';" - "module://@nikitajs/db/lib/query#/definitions/db/required config must have required property 'host'." 
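The change from `module://@nikitajs/db/lib/query` to `module://@nikitajs/db/query` in the expected error messages mirrors the new `exports` map in `packages/db/package.json`, which resolves package subpaths to files under `lib/`. A minimal sketch of the resulting import style, assuming the package is installed and resolved through that `exports` field:

```js
// Subpaths declared in package.json "exports":
//   "./register" -> "./lib/register.js"
//   "./utils/*"  -> "./lib/utils/*.js"
//   "./*"        -> "./lib/*/index.js"
import "@nikitajs/db/register";              // registers the db.* actions
import query from "@nikitajs/db/query";      // -> ./lib/query/index.js
import dbUtils from "@nikitajs/db/utils/db"; // -> ./lib/utils/db.js

console.info(typeof query.handler, typeof dbUtils.command); // "function" "function"
```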
- ].join ' ' + they 'schema required', ({ssh}) -> + nikita + $ssh: ssh + .db.query + command: 'select * from doesntmatter' + .should.be.rejectedWith [ + "NIKITA_SCHEMA_VALIDATION_CONFIG:" + "multiple errors were found in the configuration of action `db.query`:" + "module://@nikitajs/db/query#/definitions/db/required config must have required property 'admin_password';" + "module://@nikitajs/db/query#/definitions/db/required config must have required property 'admin_username';" + "module://@nikitajs/db/query#/definitions/db/required config must have required property 'engine';" + "module://@nikitajs/db/query#/definitions/db/required config must have required property 'host'." + ].join ' ' - they 'config command', ({ssh}) -> - nikita - $ssh: ssh - db: db[engine] - , -> - @db.database.remove 'test_query_1' - @db.database 'test_query_1' - {$status, stdout} = await @db.query - database: 'test_query_1' - command: """ - CREATE TABLE a_table (a_col CHAR(5)); - INSERT INTO a_table (a_col) VALUES ('value'); - select * from a_table - """ - $status.should.be.true() - stdout.should.eql 'value\n' + they 'config command', ({ssh}) -> + nikita + $ssh: ssh + db: test.db[engine] + , -> + @db.database.remove 'test_query_1' + @db.database 'test_query_1' + {$status, stdout} = await @db.query + database: 'test_query_1' + command: """ + CREATE TABLE a_table (a_col CHAR(5)); + INSERT INTO a_table (a_col) VALUES ('value'); + select * from a_table + """ + $status.should.be.true() + stdout.should.eql 'value\n' - they 'config trim', ({ssh}) -> - nikita - $ssh: ssh - db: db[engine] - , -> - @db.database.remove 'test_query_1' - @db.database 'test_query_1' - {stdout} = await @db.query - database: 'test_query_1' - command: """ - CREATE TABLE a_table (a_col CHAR(5)); - INSERT INTO a_table (a_col) VALUES ('value'); - select * from a_table - """ - trim: true - stdout.should.eql 'value' + they 'config trim', ({ssh}) -> + nikita + $ssh: ssh + db: test.db[engine] + , -> + @db.database.remove 'test_query_1' + @db.database 'test_query_1' + {stdout} = await @db.query + database: 'test_query_1' + command: """ + CREATE TABLE a_table (a_col CHAR(5)); + INSERT INTO a_table (a_col) VALUES ('value'); + select * from a_table + """ + trim: true + stdout.should.eql 'value' - they 'config grep with string', ({ssh}) -> - nikita - $ssh: ssh - db: db[engine] - , -> - @db.database.remove 'test_query_1' - @db.database 'test_query_1' - @db.query - database: 'test_query_1' - command: ''' - CREATE TABLE a_table (a_col CHAR(5)); - INSERT INTO a_table (a_col) VALUES ('value'); - ''' - {$status} = await @db.query - database: 'test_query_1' - command: ''' - select * from a_table - ''' - grep: 'value' - $status.should.be.true() - {$status} = await @db.query - database: 'test_query_1' - command: 'select * from a_table' - grep: 'invalid value' - $status.should.be.false() + they 'config grep with string', ({ssh}) -> + nikita + $ssh: ssh + db: test.db[engine] + , -> + @db.database.remove 'test_query_1' + @db.database 'test_query_1' + @db.query + database: 'test_query_1' + command: ''' + CREATE TABLE a_table (a_col CHAR(5)); + INSERT INTO a_table (a_col) VALUES ('value'); + ''' + {$status} = await @db.query + database: 'test_query_1' + command: ''' + select * from a_table + ''' + grep: 'value' + $status.should.be.true() + {$status} = await @db.query + database: 'test_query_1' + command: 'select * from a_table' + grep: 'invalid value' + $status.should.be.false() - they 'config grep with regexp', ({ssh}) -> - nikita - $ssh: ssh - db: db[engine] - , -> - 
@db.database.remove 'test_query_1' - @db.database 'test_query_1' - @db.query - database: 'test_query_1' - command: ''' - CREATE TABLE a_table (a_col CHAR(5)); - INSERT INTO a_table (a_col) VALUES ('value'); - ''' - {$status} = await @db.query - database: 'test_query_1' - command: 'select * from a_table' - grep: /^val.*$/ - $status.should.be.true() - {$status} = await @db.query - database: 'test_query_1' - command: 'select * from a_table' - grep: /^val$/ - $status.should.be.false() + they 'config grep with regexp', ({ssh}) -> + nikita + $ssh: ssh + db: test.db[engine] + , -> + @db.database.remove 'test_query_1' + @db.database 'test_query_1' + @db.query + database: 'test_query_1' + command: ''' + CREATE TABLE a_table (a_col CHAR(5)); + INSERT INTO a_table (a_col) VALUES ('value'); + ''' + {$status} = await @db.query + database: 'test_query_1' + command: 'select * from a_table' + grep: /^val.*$/ + $status.should.be.true() + {$status} = await @db.query + database: 'test_query_1' + command: 'select * from a_table' + grep: /^val$/ + $status.should.be.false() diff --git a/packages/db/test/schema/exists.coffee b/packages/db/test/schema/exists.coffee index fd7473317..bc5c35958 100644 --- a/packages/db/test/schema/exists.coffee +++ b/packages/db/test/schema/exists.coffee @@ -1,21 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) - -return unless tags.db +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'db.schema.exists postgres', -> - - return unless db.postgresql + return unless test.tags.db + return unless test.db.postgresql they 'output exists', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> - @db.database.remove 'schema_exists_0' - @db.database 'schema_exists_0' + await @db.database.remove 'schema_exists_0' + await @db.database 'schema_exists_0' {exists} = await @db.schema.exists schema: 'schema_exists_0' database: 'schema_exists_0' @@ -27,4 +26,4 @@ describe 'db.schema.exists postgres', -> schema: 'schema_exists_0' database: 'schema_exists_0' exists.should.be.true() - @db.database.remove 'schema_exists_0' + await @db.database.remove 'schema_exists_0' diff --git a/packages/db/test/schema/index.coffee b/packages/db/test/schema/index.coffee index b9c1791f1..337ce2956 100644 --- a/packages/db/test/schema/index.coffee +++ b/packages/db/test/schema/index.coffee @@ -1,21 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) - -return unless tags.db +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'db.schema postgres', -> - - return unless db.postgresql + return unless test.tags.db + return unless test.db.postgresql they 'status on new schema with no owner (existing db)', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> - @db.database.remove 'postgres_db_0' - @db.database 'postgres_db_0' + await @db.database.remove 'postgres_db_0' + await @db.database 'postgres_db_0' {$status} = await @db.schema schema: 'postgres_schema_0' database: 'postgres_db_0' @@ -24,16 +23,16 @@ describe 'db.schema postgres', -> schema: 'postgres_schema_0' database: 'postgres_db_0' $status.should.be.false() - @db.database.remove 'postgres_db_0' + await @db.database.remove 'postgres_db_0' they 'add new schema with not existing owner 
(existing db)', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> try - @db.database.remove 'postgres_db_1' - @db.database 'postgres_db_1' + await @db.database.remove 'postgres_db_1' + await @db.database 'postgres_db_1' await @db.schema schema: 'postgres_schema_1' database: 'postgres_db_1' @@ -42,19 +41,19 @@ describe 'db.schema postgres', -> catch err err.message.should.eql 'Owner Johny does not exists' finally - @db.database.remove 'postgres_db_1' + await @db.database.remove 'postgres_db_1' they 'add new schema with existing owner (existing db)', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> - @db.database.remove 'postgres_db_2' - @db.user.remove 'postgres_user_2' - @db.user + await @db.database.remove 'postgres_db_2' + await @db.user.remove 'postgres_user_2' + await @db.user username: 'postgres_user_2' password: 'postgres_user_2' - @db.database + await @db.database database: 'postgres_db_2' user: 'postgres_user_2' {$status} = await @db.schema @@ -62,13 +61,13 @@ describe 'db.schema postgres', -> database: 'postgres_db_2' owner: 'postgres_user_2' $status.should.be.true() - @db.database.remove 'postgres_db_2' - @db.user.remove 'postgres_user_2' + await @db.database.remove 'postgres_db_2' + await @db.user.remove 'postgres_user_2' they 'add new schema with no owner (not existing db)', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> @db.schema schema: 'postgres_schema_4' @@ -79,15 +78,15 @@ describe 'db.schema postgres', -> they 'add new schema after adding database and user', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> - @db.database.remove 'postgres_db_5' - @db.user.remove 'nikita_test_5' - @db.user + await @db.database.remove 'postgres_db_5' + await @db.user.remove 'nikita_test_5' + await @db.user username: 'nikita_test_5' password: 'secret' engine: 'postgresql' - @db.database + await @db.database user: 'nikita_test_5' database: 'postgres_db_5' {$status} = await @db.schema @@ -95,5 +94,5 @@ describe 'db.schema postgres', -> schema: 'postgres_schema_5' owner: 'nikita_test_5' $status.should.be.true() - @db.database.remove 'postgres_db_5' - @db.user.remove 'nikita_test_5' + await @db.database.remove 'postgres_db_5' + await @db.user.remove 'nikita_test_5' diff --git a/packages/db/test/schema/list.coffee b/packages/db/test/schema/list.coffee index 47e15933e..20bbe9df9 100644 --- a/packages/db/test/schema/list.coffee +++ b/packages/db/test/schema/list.coffee @@ -1,35 +1,34 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) - -return unless tags.db +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'db.schema.list postgres', -> - - return unless db.postgresql + return unless test.tags.db + return unless test.db.postgresql they 'list', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> # Clean - @db.database.remove 'db_schema_list_0_db' - @db.user.remove 'db_schema_list_0_usr' + await @db.database.remove 'db_schema_list_0_db' + await @db.user.remove 'db_schema_list_0_usr' # Prepare - @db.user + await @db.user username: 'db_schema_list_0_usr' password: 'secret' - @db.database + await @db.database user: 'db_schema_list_0_usr' database: 'db_schema_list_0_db' # Without a user - @db.schema + await @db.schema database: 'db_schema_list_0_db' schema: 'db_schema_list_0_sch_0' # With a user - 
@db.schema + await @db.schema database: 'db_schema_list_0_db' schema: 'db_schema_list_0_sch_1' owner: 'db_schema_list_0_usr' @@ -38,8 +37,8 @@ describe 'db.schema.list postgres', -> schemas.should.eql [ { name: 'db_schema_list_0_sch_0', owner: 'root' } { name: 'db_schema_list_0_sch_1', owner: 'db_schema_list_0_usr' } - { name: 'public', owner: 'root' } + { name: 'public', owner: 'pg_database_owner' } ] # Clean - @db.database.remove 'db_schema_list_0_db' - @db.user.remove 'db_schema_list_0_usr' + await @db.database.remove 'db_schema_list_0_db' + await @db.user.remove 'db_schema_list_0_usr' diff --git a/packages/db/test/schema/remove.coffee b/packages/db/test/schema/remove.coffee index a8e2121a1..667fe2083 100644 --- a/packages/db/test/schema/remove.coffee +++ b/packages/db/test/schema/remove.coffee @@ -1,35 +1,34 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) - -return unless tags.db +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'db.schema.remove postgres', -> - - return unless db.postgresql + return unless test.tags.db + return unless test.db.postgresql they 'does not exists', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> - @db.database.remove 'schema_remove_0' - @db.database 'schema_remove_0' + await @db.database.remove 'schema_remove_0' + await @db.database 'schema_remove_0' {$status} = await @db.schema.remove schema: 'schema_remove_0' database: 'schema_remove_0' $status.should.be.false() - @db.database.remove 'schema_remove_0' + await @db.database.remove 'schema_remove_0' they 'output exists', ({ssh}) -> nikita $ssh: ssh - db: db.postgresql + db: test.db.postgresql , -> - @db.database.remove 'schema_remove_1' - @db.database 'schema_remove_1' - @db.schema + await @db.database.remove 'schema_remove_1' + await @db.database 'schema_remove_1' + await @db.schema schema: 'schema_remove_1' database: 'schema_remove_1' {$status} = await @db.schema.remove @@ -40,4 +39,4 @@ describe 'db.schema.remove postgres', -> schema: 'schema_remove_1' database: 'schema_remove_1' $status.should.be.false() - @db.database.remove 'schema_remove_1' + await @db.database.remove 'schema_remove_1' diff --git a/packages/db/test/test.coffee b/packages/db/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/db/test/test.coffee +++ b/packages/db/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export 
configuration -module.exports = config +export default config.default diff --git a/packages/db/test/user/exists.coffee b/packages/db/test/user/exists.coffee index ca74b9c46..bdfb7c8e7 100644 --- a/packages/db/test/user/exists.coffee +++ b/packages/db/test/user/exists.coffee @@ -1,20 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.db - -for engine, _ of db +for engine, _ of test.db describe "db.user.exists #{engine}", -> + return unless test.tags.db they 'user not created', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.user.remove 'test_user_exists_1_user' + await @db.user.remove 'test_user_exists_1_user' {exists} = await @db.user.exists username: 'test_user_exists_1_user' exists.should.be.false() @@ -22,16 +22,16 @@ for engine, _ of db they 'with status as false as true', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , ({tools: {status}})-> - @db.user.remove 'test_user_exists_2_user', $shy: true - @db.user + await @db.user.remove 'test_user_exists_2_user', $shy: true + await @db.user username: 'test_user_exists_2_user' password: 'test_user_exists_2_password' $shy: true {$status} = await @db.user.exists username: 'test_user_exists_2_user' $status.should.be.true() - @db.user.remove 'test_user_exists_2_user', $shy: true + await @db.user.remove 'test_user_exists_2_user', $shy: true # Modules of type exists shall be shy status().should.be.false() diff --git a/packages/db/test/user/index.coffee b/packages/db/test/user/index.coffee index 837b10936..381564af5 100644 --- a/packages/db/test/user/index.coffee +++ b/packages/db/test/user/index.coffee @@ -1,14 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{command} = require '../../lib/query' -{tags, config, db} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.db - -for engine, _ of db +for engine, _ of test.db describe "db.user #{engine}", -> + return unless test.tags.db they 'requires host, hostname, username', ({ssh}) -> nikita @@ -17,15 +16,15 @@ for engine, _ of db @db.user port: 5432 engine: engine - admin_username: db[engine].admin_username - admin_password: db[engine].admin_password + admin_username: test.db[engine].admin_username + admin_password: test.db[engine].admin_password .should.be.rejectedWith message: [ 'NIKITA_SCHEMA_VALIDATION_CONFIG:' 'multiple errors were found in the configuration of action `db.user`:' '#/required config must have required property \'password\';' '#/required config must have required property \'username\';' - 'module://@nikitajs/db/lib/query#/definitions/db/required config must have required property \'host\'.' + 'module://@nikitajs/db/query#/definitions/db/required config must have required property \'host\'.' 
].join ' ' they 'requires admin_username, password, username', ({ssh}) -> @@ -36,22 +35,22 @@ for engine, _ of db host: 'localhost' port: 5432 engine: engine - admin_password: db[engine].admin_password + admin_password: test.db[engine].admin_password .should.be.rejectedWith message: [ 'NIKITA_SCHEMA_VALIDATION_CONFIG:' 'multiple errors were found in the configuration of action `db.user`:' '#/required config must have required property \'password\';' '#/required config must have required property \'username\';' - 'module://@nikitajs/db/lib/query#/definitions/db/required config must have required property \'admin_username\'.' + 'module://@nikitajs/db/query#/definitions/db/required config must have required property \'admin_username\'.' ].join ' ' they 'add new user', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.user.remove 'test_user_1_user' + await @db.user.remove 'test_user_1_user' {$status} = await @db.user username: 'test_user_1_user' password: 'test_user_1_password' @@ -63,28 +62,28 @@ for engine, _ of db {exists} = await @db.user.exists username: 'test_user_1_user' exists.should.be.true() - @db.user.remove 'test_user_1_user' + await @db.user.remove 'test_user_1_user' they 'change password', ({ssh}) -> nikita $ssh: ssh - db: db[engine] + db: test.db[engine] , -> - @db.database.remove 'test_user_2_db' - @db.user.remove 'test_user_2_user' - @db.user + await @db.database.remove 'test_user_2_db' + await @db.user.remove 'test_user_2_user' + await @db.user username: 'test_user_2_user' password: 'test_user_2_invalid' - @db.database + await @db.database database: 'test_user_2_db' user: 'test_user_2_user' - @db.user + await @db.user username: 'test_user_2_user' password: 'test_user_2_valid' - @db.query + await @db.query engine: engine - host: db[engine].host - port: db[engine].port + host: test.db[engine].host + port: test.db[engine].port database: 'test_user_2_db' admin_username: 'test_user_2_user' admin_password: 'test_user_2_valid' @@ -93,5 +92,5 @@ for engine, _ of db 'show tables' when 'postgresql' '\\dt' - @db.database.remove 'test_user_2_db' - @db.user.remove 'test_user_2_user' + await @db.database.remove 'test_user_2_db' + await @db.user.remove 'test_user_2_user' diff --git a/packages/db/test/utils/command.coffee b/packages/db/test/utils/command.coffee new file mode 100644 index 000000000..4e5dd7560 --- /dev/null +++ b/packages/db/test/utils/command.coffee @@ -0,0 +1,112 @@ + +import { command } from '@nikitajs/db/utils/db' + +describe 'db.utils.command', -> + + it 'invalid engine', -> + () -> command + admin_password: 'rootme' + admin_username: 'root' + host: 'localhost' + engine: 'invalid_engine' + .should.throw 'Unsupported engine: "invalid_engine"' + + it 'required arguments', -> + () -> command + admin_password: 'rootme' + host: 'local' + engine: 'mariadb' + .should.throw + code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' + message: [ + 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS:' + 'Missing required argument: "admin_username"' + ].join ' ' + () -> command + admin_username: 'root' + host: 'local' + engine: 'mariadb' + .should.throw + code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' + message: [ + 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS:' + 'Missing required argument: "admin_password"' + ].join ' ' + () -> command + admin_password: 'rootme' + admin_username: 'root' + engine: 'mariadb' + .should.throw + code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' + message: [ + 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS:' + 'Missing required argument: "host"' + ].join ' ' + () -> command + 
admin_password: 'rootme' + engine: 'mariadb' + .should.throw + code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' + message: new RegExp 'Missing required argument:' + + describe 'using engine: mariadb', -> + + it 'default values', -> + command + admin_password: 'rootme' + admin_username: 'root' + engine: 'mariadb' + host: 'localhost' + .should.equal 'mysql -hlocalhost -P3306 -uroot -p\'rootme\'' + + it 'user values', -> + command + admin_password: 'password' + admin_username: 'test_user' + engine: 'mariadb' + host: 'mariadb' + port: 1729 + .should.equal 'mysql -hmariadb -P1729 -utest_user -p\'password\'' + + it 'command option', -> + command + admin_password: 'password' + admin_username: 'test_user' + engine: 'mariadb' + host: 'mariadb' + port: 1729 + command: ''' + show databases; + ''' + .should.equal 'mysql -hmariadb -P1729 -utest_user -p\'password\' -e "show databases;"' + + describe 'using engine: postgresql', -> + + it 'default values', -> + command + admin_password: 'rootme' + admin_username: 'root' + engine: 'postgresql' + host: 'localhost' + .should.equal 'PGPASSWORD=rootme psql -h localhost -p 5432 -U root -tAq' + + it 'user values', -> + command + admin_password: 'password' + admin_username: 'test_user' + engine: 'postgresql' + host: 'postgresql' + port: 1729 + .should.equal 'PGPASSWORD=password psql -h postgresql -p 1729 -U test_user -tAq' + + it 'command option', -> + command + admin_password: 'password' + admin_username: 'test_user' + engine: 'postgresql' + host: 'postgresql' + port: 1729 + command: ''' + show databases; + ''' + .should.equal 'PGPASSWORD=password psql -h postgresql -p 1729 -U test_user -tAq -c "show databases;"' diff --git a/packages/db/test/utils/escape.coffee b/packages/db/test/utils/escape.coffee new file mode 100644 index 000000000..43060f49a --- /dev/null +++ b/packages/db/test/utils/escape.coffee @@ -0,0 +1,16 @@ + +import { escape } from '@nikitajs/db/utils/db' + +describe 'db.utils.escape', -> + + it 'backslashes', -> + escape('\\').should.eql '\\\\' + + it 'double quotes', -> + escape('"').should.eql '\\"' + + it 'backslashes and double quotes', -> + query = 'SELECT * FROM my_db WHERE name = "John\\\'s"' + expected = 'SELECT * FROM my_db WHERE name = \\"John\\\\\'s\\"' + escape(query).should.eql expected + \ No newline at end of file diff --git a/packages/db/test/utils/index.coffee b/packages/db/test/utils/index.coffee deleted file mode 100644 index 997193d72..000000000 --- a/packages/db/test/utils/index.coffee +++ /dev/null @@ -1,142 +0,0 @@ -utils = require '../../lib/utils' -{command, escape, jdbc} = utils.db - -describe 'db.utils', -> - - describe 'escape', -> - - it 'backslashes', -> - escape('\\').should.eql '\\\\' - - it 'double quotes', -> - escape('"').should.eql '\\"' - - it 'backslashes and double quotes', -> - query = 'SELECT * FROM my_db WHERE name = "John\\\'s"' - expected = 'SELECT * FROM my_db WHERE name = \\"John\\\\\'s\\"' - escape(query).should.eql expected - - describe 'jdbc', -> - - it 'get default port', -> - jdbc('jdbc:mysql://localhost/my_db').should.eql - engine: 'mysql', - addresses: [ { host: 'localhost', port: 3306 } ], - database: 'my_db' - jdbc('jdbc:postgresql://localhost/my_db').should.eql - engine: 'postgresql', - addresses: [ { host: 'localhost', port: 5432 } ], - database: 'my_db' - - it 'get database', -> - jdbc('jdbc:mysql://master3.ryba:3306/my_db?a_param=true').database.should.eql 'my_db' - - describe 'command', -> - - it 'invalid engine', -> - () -> command - admin_password: 'rootme' - admin_username: 'root' - host: 
'localhost' - engine: 'invalid_engine' - .should.throw 'Unsupported engine: "invalid_engine"' - - it 'required arguments', -> - () -> command - admin_password: 'rootme' - host: 'local' - engine: 'mariadb' - .should.throw - code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' - message: [ - 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS:' - 'Missing required argument: "admin_username"' - ].join ' ' - () -> command - admin_username: 'root' - host: 'local' - engine: 'mariadb' - .should.throw - code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' - message: [ - 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS:' - 'Missing required argument: "admin_password"' - ].join ' ' - () -> command - admin_password: 'rootme' - admin_username: 'root' - engine: 'mariadb' - .should.throw - code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' - message: [ - 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS:' - 'Missing required argument: "host"' - ].join ' ' - () -> command - admin_password: 'rootme' - engine: 'mariadb' - .should.throw - code: 'NIKITA_DB_UTILS_REQUIRED_ARGUMENTS' - message: new RegExp 'Missing required argument:' - - describe 'using engine: mariadb', -> - - it 'default values', -> - command - admin_password: 'rootme' - admin_username: 'root' - engine: 'mariadb' - host: 'localhost' - .should.equal 'mysql -hlocalhost -P3306 -uroot -p\'rootme\'' - - it 'user values', -> - command - admin_password: 'password' - admin_username: 'test_user' - engine: 'mariadb' - host: 'mariadb' - port: 1729 - .should.equal 'mysql -hmariadb -P1729 -utest_user -p\'password\'' - - it 'command option', -> - command - admin_password: 'password' - admin_username: 'test_user' - engine: 'mariadb' - host: 'mariadb' - port: 1729 - command: ''' - show databases; - ''' - .should.equal 'mysql -hmariadb -P1729 -utest_user -p\'password\' -e "show databases;"' - - describe 'using engine: postgresql', -> - - it 'default values', -> - command - admin_password: 'rootme' - admin_username: 'root' - engine: 'postgresql' - host: 'localhost' - .should.equal 'PGPASSWORD=rootme psql -h localhost -p 5432 -U root -tAq' - - it 'user values', -> - command - admin_password: 'password' - admin_username: 'test_user' - engine: 'postgresql' - host: 'postgresql' - port: 1729 - .should.equal 'PGPASSWORD=password psql -h postgresql -p 1729 -U test_user -tAq' - - it 'command option', -> - command - admin_password: 'password' - admin_username: 'test_user' - engine: 'postgresql' - host: 'postgresql' - port: 1729 - command: ''' - show databases; - ''' - .should.equal 'PGPASSWORD=password psql -h postgresql -p 1729 -U test_user -tAq -c "show databases;"' diff --git a/packages/db/test/utils/jdbc.coffee b/packages/db/test/utils/jdbc.coffee new file mode 100644 index 000000000..5f5b86574 --- /dev/null +++ b/packages/db/test/utils/jdbc.coffee @@ -0,0 +1,17 @@ + +import { jdbc } from '@nikitajs/db/utils/db' + +describe 'db.utils.jdbc', -> + + it 'get default port', -> + jdbc('jdbc:mysql://localhost/my_db').should.eql + engine: 'mysql', + addresses: [ { host: 'localhost', port: 3306 } ], + database: 'my_db' + jdbc('jdbc:postgresql://localhost/my_db').should.eql + engine: 'postgresql', + addresses: [ { host: 'localhost', port: 5432 } ], + database: 'my_db' + + it 'get database', -> + jdbc('jdbc:mysql://master3.ryba:3306/my_db?a_param=true').database.should.eql 'my_db' diff --git a/packages/docker/README.md b/packages/docker/README.md index de73fd5f3..b8ad1f221 100644 --- a/packages/docker/README.md +++ b/packages/docker/README.md @@ -2,3 +2,16 @@ # Nikita "docker" package The "docker" package provides Nikita actions for various 
Docker operations. + +## Usage + +```js +import "@nikitajs/docker/register"; +import nikita from "@nikitajs/core"; + +const {stdout} = await nikita.docker.exec({ + container: "my_container", + command: "whoami" +}); +console.info(stdout); +``` diff --git a/packages/docker/env/docker/Dockerfile b/packages/docker/env/docker/Dockerfile index 88465082d..92fffe746 100644 --- a/packages/docker/env/docker/Dockerfile +++ b/packages/docker/env/docker/Dockerfile @@ -1,21 +1,20 @@ -FROM ubuntu:focal -MAINTAINER David Worms +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " -# Install Node.js -# ping used in compose test -ENV NODE_VERSION stable -RUN \ - apt update -y \ - && apt install -y build-essential curl git iputils-ping \ - && curl -L https://git.io/n-install | bash -s -- -y \ - && /root/n/bin/n $NODE_VERSION - -# Install SSH -RUN DEBIAN_FRONTEND="noninteractive" apt-get install -y openssh-server \ - && ssh-keygen -t rsa -f ~/.ssh/id_rsa -N '' \ - && cat ~/.ssh/id_rsa.pub > ~/.ssh/authorized_keys \ - && ssh-keygen -A \ - && mkdir -p /run/sshd +RUN apt update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo && \ + # SSH configuration + ssh-keygen -A && \ + mkdir -p /run/sshd # Install Docker RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg \ @@ -30,4 +29,21 @@ ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/docker +# User as sudoer +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +USER nikita + +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + +# Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +ENV PATH /home/nikita/n/bin:$PATH + ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/docker/env/docker/docker-compose.yml b/packages/docker/env/docker/docker-compose.yml index 717345f3d..fc36be493 100644 --- a/packages/docker/env/docker/docker-compose.yml +++ b/packages/docker/env/docker/docker-compose.yml @@ -8,6 +8,9 @@ services: - ../../../../:/nikita depends_on: - dind + # Not working for now, waiting is implemented inside entrypoint.sh + # dind: + # condition: service_healthy environment: NIKITA_TEST_MODULE: /nikita/packages/docker/env/docker/test.coffee # DOCKER_HOST: 'tcp://dind:2375' @@ -15,8 +18,13 @@ services: image: docker:dind privileged: true environment: - - DOCKER_TLS_CERTDIR= + DOCKER_TLS_CERTDIR: '' expose: - "2375" - logging: - driver: none + # healthcheck: + # test: "bash -c 'echo > /dev/tcp/localhost/2375'" + # interval: 1s + # timeout: 5s + # retries: 50 + # logging: + # driver: none diff --git a/packages/docker/env/docker/entrypoint.sh b/packages/docker/env/docker/entrypoint.sh index e207a5622..a18f4392c 100755 --- a/packages/docker/env/docker/entrypoint.sh +++ b/packages/docker/env/docker/entrypoint.sh @@ -1,8 +1,20 @@ #!/bin/bash -set -e + +# Note, we had to disable the exit builtin because the until condition kills the +# script despite the documentation which states "the shell does not exit if the +# command that fails is part of the command list immediately following a while +# or 
until keyword" +# set -e # Start ssh daemon -/usr/sbin/sshd +sudo /usr/sbin/sshd +# Wait until Docker is ready +i=0; until echo > /dev/tcp/dind/2375; do + [[ i -eq 20 ]] && >&2 echo 'Docker not yet started after 20s' && exit 1 + ((i++)) + sleep 1 +done +# Test execution if test -t 0; then # We have TTY, so probably an interactive container... if [[ $@ ]]; then @@ -15,7 +27,5 @@ if test -t 0; then /bin/bash fi else - # Detached mode - . ~/.bashrc npm run test:local fi diff --git a/packages/docker/env/docker/run.sh b/packages/docker/env/docker/run.sh index 3eaa8bd58..9b4173181 100755 --- a/packages/docker/env/docker/run.sh +++ b/packages/docker/env/docker/run.sh @@ -1,4 +1,6 @@ #!/usr/bin/env bash cd `pwd`/`dirname ${BASH_SOURCE}` -docker compose up --abort-on-container-exit +# Use `--attach` to restrict attaching to the specified services, +# disabling logging for other services +docker compose up --abort-on-container-exit --attach nodejs diff --git a/packages/docker/env/docker/test.coffee b/packages/docker/env/docker/test.coffee index 5e6492237..3ae9231a7 100644 --- a/packages/docker/env/docker/test.coffee +++ b/packages/docker/env/docker/test.coffee @@ -1,5 +1,6 @@ +import os from "node:os" -module.exports = +export default tags: docker: true docker_volume: true @@ -17,6 +18,6 @@ module.exports = , label: 'remote' ssh: - host: '127.0.0.1', username: process.env.USER, - private_key_path: '~/.ssh/id_rsa' + host: '127.0.0.1', username: os.userInfo().username, + private_key_path: '~/.ssh/id_ed25519' ] diff --git a/packages/docker/lib/build/index.js b/packages/docker/lib/build/index.js index d3757453d..3394832c0 100644 --- a/packages/docker/lib/build/index.js +++ b/packages/docker/lib/build/index.js @@ -1,8 +1,8 @@ // Dependencies -const path = require("path"); -const utils = require("../utils"); -const definitions = require("./schema.json"); -const esa = utils.string.escapeshellarg; +import path from "node:path"; +import utils from "@nikitajs/docker/utils"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; const errors = { NIKITA_DOCKER_BUILD_CONTENT_FILE_REQUIRED: function () { @@ -14,34 +14,22 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { - var k, line; - let number_of_step = 0; - // status unmodified if final tag already exists - const dockerfile_commands = [ - "CMD", - "LABEL", - "EXPOSE", - "ENV", - "ADD", - "COPY", - "ENTRYPOINT", - "VOLUME", - "USER", - "WORKDIR", - "ARG", - "ONBUILD", - "RUN", - "STOPSIGNAL", - "MAINTAINER", - ]; + // Normalization if (config.file && config.cwd == null) { config.cwd = path.dirname(config.file); } if (config.cwd && config.file == null) { config.file = path.resolve(config.cwd, "Dockerfile"); } + // Retrieve previous image + const { images: oldImages } = await this.docker.images({ + filters: { + reference: config.tag ? `${config.image}:${config.tag}` : config.image + } + }) + const oldID = oldImages.length === 1 ? 
oldImages[0].ID : null; // Make sure the Dockerfile exists if (!config.content) { await this.fs.assert(config.file); @@ -90,72 +78,27 @@ module.exports = { ].join(" "), cwd: config.cwd, }); - // Get the content of the Dockerfile - if (config.content) { - await this.file({ - content: config.content, - source: config.file, - target: ({ content }) => config.content = content, - from: config.from, - to: config.to, - match: config.match, - replace: config.replace, - append: config.append, - before: config.before, - write: config.write, - }); - } else { - // Read Dockerfile if necessary to count steps - log({ - message: `Reading Dockerfile from : ${config.file}`, - level: "INFO", - }); - ({ data: config.content } = await this.fs.base.readFile({ - target: config.file, - encoding: "utf8", - })); - } - const contentLines = utils.string.lines(config.content); - // Count steps - for (const line of contentLines) { - const [_, cmd] = /^(.*?)\s/.exec(line); - if (dockerfile_commands.includes(cmd)) { - number_of_step++; - } - } - let image_id = null; - // Count cache - const lines = utils.string.lines(stdout); - let number_of_cache = 0; - for (k in lines) { - line = lines[k]; - if (line.indexOf("Using cache") !== -1) { - number_of_cache = number_of_cache + 1; - } - if (line.indexOf("Successfully built") !== -1) { - image_id = line.split(" ").pop().toString(); + // Extract the new image ID + const { images: newImages } = await this.docker.images({ + filters: { + reference: config.tag ? `${config.image}:${config.tag}` : config.image } - } - const userargs = { - $status: number_of_step !== number_of_cache, - image: image_id, + }) + const [newImage] = newImages; + const { ID: newID } = newImage; + // Output + log( + "INFO", + oldID !== newID + ? `New image id ${newID}` + : `Identical image id ${newID}` + ); + return { + $status: oldID !== newID, + image_id: newID, stdout: stdout, stderr: stderr, }; - log( - userargs.$status - ? { - message: `New image id ${userargs.image}`, - level: "INFO", - module: "nikita/lib/docker/build", - } - : { - message: `Identical image id ${userargs.image}`, - level: "INFO", - module: "nikita/lib/docker/build", - } - ); - return userargs; }, metadata: { global: "docker", diff --git a/packages/docker/lib/build/schema.json b/packages/docker/lib/build/schema.json index 43c80160e..db7b8ae1b 100644 --- a/packages/docker/lib/build/schema.json +++ b/packages/docker/lib/build/schema.json @@ -27,7 +27,7 @@ "description": "Change the build working directory." 
}, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "file": { "type": "string", diff --git a/packages/docker/lib/compose/index.js b/packages/docker/lib/compose/index.js index 2f8fe886d..f0915c4e1 100644 --- a/packages/docker/lib/compose/index.js +++ b/packages/docker/lib/compose/index.js @@ -1,10 +1,10 @@ // Dependencies -const utils = require("../utils"); -const path = require("path"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/docker/utils"; +import path from "node:path"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { find, log } }) { // Validate parameters if (config.target == null && config.content == null) { diff --git a/packages/docker/lib/cp/index.js b/packages/docker/lib/cp/index.js index 736d9ce57..9f38ef74d 100644 --- a/packages/docker/lib/cp/index.js +++ b/packages/docker/lib/cp/index.js @@ -1,13 +1,12 @@ // Dependencies -// const path = require('path'); -const utils = require("../utils"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/docker/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { path } }) { - const [s, source_container, source_path] = /(.*:)?(.*)/.exec(config.source); - let [t, target_container, target_path] = /(.*:)?(.*)/.exec(config.target); + let [, source_container, source_path] = /(.*:)?(.*)/.exec(config.source); + let [, target_container, target_path] = /(.*:)?(.*)/.exec(config.target); if (source_container && target_container) { throw Error("Incompatible source and target config"); } diff --git a/packages/docker/lib/cp/schema.json b/packages/docker/lib/cp/schema.json index ea1ba7ef6..c4541a9a8 100644 --- a/packages/docker/lib/cp/schema.json +++ b/packages/docker/lib/cp/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "source": { "type": "string", diff --git a/packages/docker/lib/exec/index.js b/packages/docker/lib/exec/index.js index d8dc9539f..5dd42d64e 100644 --- a/packages/docker/lib/exec/index.js +++ b/packages/docker/lib/exec/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} diff --git a/packages/docker/lib/exec/schema.json b/packages/docker/lib/exec/schema.json index e31bc0144..a5f397d18 100644 --- a/packages/docker/lib/exec/schema.json +++ b/packages/docker/lib/exec/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "code": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/code", + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/code", "default": {} }, "container": { @@ -11,10 +11,10 @@ "description": "Name/ID of the container" }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chown#/definitions/config/properties/uid" + "$ref": 
"module://@nikitajs/core/actions/fs/base/chown#/definitions/config/properties/uid" }, "service": { "type": "boolean", @@ -22,7 +22,7 @@ "description": "If true, run container as a service, else run as a command, true by\ndefault." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/base/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/base/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/docker/lib/images/README.md b/packages/docker/lib/images/README.md new file mode 100644 index 000000000..4a8e565c4 --- /dev/null +++ b/packages/docker/lib/images/README.md @@ -0,0 +1,35 @@ +# `nikita.docker.images` + +List Docker images. + +## Basic usage + +```js +const {count, images} = await nikita.docker.images() +console.info(`There are ${count} images:`); +images.map( (image) => { + console.info('- Containers:', image.Containers); + console.info(' CreatedAt:', image.CreatedAt); + console.info(' CreatedSince:', image.CreatedSince); + console.info(' Digest:', image.Digest); + console.info(' ID:', image.ID); + console.info(' Repository:', image.Repository); + console.info(' SharedSize:', image.SharedSize); + console.info(' Size:', image.Size); + console.info(' Tag:', image.Tag); + console.info(' UniqueSize:', image.UniqueSize); + console.info(' VirtualSize:', image.VirtualSize); +}) +``` + +## Using filter + +```js +const {count, images} = await nikita.docker.images({ + filters: { + label: 'nikita=1.0.0', + dangling: false + } +}) +console.info(`Found ${count} images matching the filter.`); +``` diff --git a/packages/docker/lib/images/index.js b/packages/docker/lib/images/index.js new file mode 100644 index 000000000..a92c0dfda --- /dev/null +++ b/packages/docker/lib/images/index.js @@ -0,0 +1,43 @@ +// Dependencies +import utils from '@nikitajs/core/utils' +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; + +// Action +export default { + handler: async function ({ config }) { + const images = await this.docker.tools + .execute({ + format: "jsonlines", + command: [ + "images", + "--format '{{json .}}'", + ...Object.keys(config.filters || []).map( + (property) => { + const value = config.filters[property]; + if (typeof value === 'string') { + return '--filter ' + esa(property) + "=" + esa(value) + }else if(typeof value === 'boolean'){ + return '--filter ' + esa(property) + "=" + esa(value ? 'true' : 'false') + }else { + throw utils.error('NIKITA_DOCKER_IMAGES_FILTER', [ + 'Unsupported filter value type,', + 'expect a string or a boolean value,', + "got ${JSON.stringify(property)}." + ]) + } + } + ), + ].filter(Boolean).join(' '), + }) + .then(({ data }) => data); + return { + count: images.length, + images: images, + }; + }, + metadata: { + shy: true, + definitions: definitions + }, +}; diff --git a/packages/docker/lib/images/schema.json b/packages/docker/lib/images/schema.json new file mode 100644 index 000000000..9d34fb435 --- /dev/null +++ b/packages/docker/lib/images/schema.json @@ -0,0 +1,33 @@ +{ + "config": { + "type": "object", + "properties": { + "filters": { + "type": "object", + "properties": { + "before": { + "type": "string", + "description": "Filter images created before the image with given id or reference." + }, + "dangling": { + "type": "boolean", + "description": "Dangling images are intermediate images which have not been assigned a repository and a tag. By default, docker list all images whether they dangling or not. 
Set the dangling filter to `true` to only list dangling images or to `false` to filter out dangling images." + }, + "label": { + "type": "string", + "description": "Filter images by label. Matches are made against the label name as well as its value when separated by the equal sign. For example, the expression `nikita.version=1.0` matches against the `nikita.version` label and its `1.0` value." + }, + "reference": { + "type": "string", + "description": "Filter images by reference. A reference includes the repository and tag name. Globbing expressions are accepted, for example `alp*:*`." + }, + "since": { + "type": "string", + "description": "Filter images created after the image with given id or reference." + } + }, + "description": "" + } + } + } +} diff --git a/packages/docker/lib/inspect/index.js b/packages/docker/lib/inspect/index.js index a1e5d98f2..b46d6ae54 100644 --- a/packages/docker/lib/inspect/index.js +++ b/packages/docker/lib/inspect/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { const isCointainerArray = Array.isArray(config.container); const { data: info } = await this.docker.tools.execute({ diff --git a/packages/docker/lib/inspect/schema.json b/packages/docker/lib/inspect/schema.json index c65df27db..06561c6cd 100644 --- a/packages/docker/lib/inspect/schema.json +++ b/packages/docker/lib/inspect/schema.json @@ -13,7 +13,7 @@ "description": "Name/ID of the container (array of containers not yet implemented)." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" } }, "required": [ diff --git a/packages/docker/lib/kill/index.js b/packages/docker/lib/kill/index.js index ca4063dc7..22042e31a 100644 --- a/packages/docker/lib/kill/index.js +++ b/packages/docker/lib/kill/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.docker.tools.execute({ command: `ps | egrep ' ${config.container}$' | grep 'Up'`, diff --git a/packages/docker/lib/kill/schema.json b/packages/docker/lib/kill/schema.json index 4503c613a..6f600f845 100644 --- a/packages/docker/lib/kill/schema.json +++ b/packages/docker/lib/kill/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container."
}, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "signal": { "type": [ diff --git a/packages/docker/lib/load/index.js b/packages/docker/lib/load/index.js index f39f812ee..ff74c83e1 100644 --- a/packages/docker/lib/load/index.js +++ b/packages/docker/lib/load/index.js @@ -1,81 +1,69 @@ // Dependencies -const dedent = require("dedent"); -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; -// ## Schema definitions -var handler, utils; - -// ## Handler -handler = async function ({ config, tools: { log } }) { - // Validate parameters - if (config.input == null) { - config.input = config.source; - } - if (config.input == null) { - throw Error("Missing input parameter"); - } - // need to records the list of image to see if status is modified or not after load - // for this we print the existing images as REPOSITORY:TAG:IMAGE - // parse the result to record images as an array of {'REPOSITORY:TAG:'= 'IMAGE'} - log("DEBUG", "Storing previous state of image"); - if (config.checksum == null) { - log("DEBUG", "No checksum provided"); - } else { - log("INFO", `Checksum provided :${config.checksum}`); - } - if (config.checksum == null) { - config.checksum = ""; - } - // Load registered image and search for a matching ID - let checksumExists = false; - let images = await this.docker.tools - .execute({ - format: "jsonlines", - command: `images --filter dangling=false --format '{{json .}}'`, - }) - .then(({ data }) => data) - .then((images) => - images.map((img) => { - if (img.ID === config.checksum) { - log( - "INFO", - `Image already exist checksum :${config.checksum}, repo:tag \"${img.Repository}:${img.Tag}\"` - ); - checksumExists = true; - } - return `${img.Repository}:${img.Tag}#${img.ID}`; +// Action +export default { + handler: async function ({ config, tools: { log } }) { + // Validate parameters + if (config.input == null) { + config.input = config.source; + } + if (config.input == null) { + throw Error("Missing input parameter"); + } + // need to records the list of image to see if status is modified or not after load + // for this we print the existing images as REPOSITORY:TAG:IMAGE + // parse the result to record images as an array of {'REPOSITORY:TAG:'= 'IMAGE'} + log("DEBUG", "Storing previous state of image"); + if (config.checksum == null) { + log("DEBUG", "No checksum provided"); + } else { + log("INFO", `Checksum provided :${config.checksum}`); + } + if (config.checksum == null) { + config.checksum = ""; + } + // Load registered image and search for a matching ID + let checksumExists = false; + let images = await this.docker + .images({ + filters: { dangling: false }, }) + .then(({images}) => + images.map((image) => { + if (image.ID === config.checksum) { + log( + "INFO", + `Image already exist checksum :${config.checksum}, repo:tag \"${image.Repository}:${image.Tag}\"` + ); + checksumExists = true; + } + return `${image.Repository}:${image.Tag}#${image.ID}`; + }) + ); + // Stop here if matching ID is found + if (checksumExists) { + return false; + } + // Load the image and extract its name + log("INFO", `Start Loading image ${config.input} and extract its name`); + const { data: name } = await this.docker.tools.execute({ + command: `load -i ${config.input}`, + format: ({ stdout }) => /^.*\s(.*)$/.exec(stdout.trim())[1], + }); + const { data: imageInfo } = await this.docker.tools.execute({ + command: `image ls 
--format '{{json .}}' ${name}`, + format: "json", + }); + let status = !images.includes( + `${imageInfo.Repository}:${imageInfo.Tag}#${imageInfo.ID}` ); - // Stop here if matching ID is found - if (checksumExists) { - return false; - } - // Load the image and extract its name - log("INFO", `Start Loading image ${config.input} and extract its name`); - const { data: name, stdout } = await this.docker.tools.execute({ - command: `load -i ${config.input}`, - format: ({ stdout }) => /^.*\s(.*)$/.exec(stdout.trim())[1], - }); - const { data: imageInfo } = await this.docker.tools.execute({ - command: `image ls --format '{{json .}}' ${name}`, - format: "json", - }); - let status = !images.includes( - `${imageInfo.Repository}:${imageInfo.Tag}#${imageInfo.ID}` - ); - return { - $status: status, - }; -}; - -// ## Exports -module.exports = { - handler: handler, + return { + $status: status, + }; + }, metadata: { global: "docker", definitions: definitions, }, }; - -// ## Dependencies -utils = require("../utils"); diff --git a/packages/docker/lib/load/schema.json b/packages/docker/lib/load/schema.json index e4e45b7db..ff5c6d0d7 100644 --- a/packages/docker/lib/load/schema.json +++ b/packages/docker/lib/load/schema.json @@ -7,7 +7,7 @@ "description": "If provided, will check if attached input archive to checksum already\nexist, not native to docker but implemented to get better performance." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "input": { "type": "string", diff --git a/packages/docker/lib/login/index.js b/packages/docker/lib/login/index.js index d5c3edd94..4de562c0f 100644 --- a/packages/docker/lib/login/index.js +++ b/packages/docker/lib/login/index.js @@ -1,10 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require("../../utils"); -const esa = utils.string.escapeshellarg; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { await this.docker.tools.execute({ command: [ diff --git a/packages/docker/lib/login/schema.json b/packages/docker/lib/login/schema.json index d30f3125b..ad956f1d2 100644 --- a/packages/docker/lib/login/schema.json +++ b/packages/docker/lib/login/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "email": { "type": "string", diff --git a/packages/docker/lib/logout/index.js b/packages/docker/lib/logout/index.js index 9e24e6cf2..540c669d8 100644 --- a/packages/docker/lib/logout/index.js +++ b/packages/docker/lib/logout/index.js @@ -1,11 +1,10 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require("../../utils"); -const esa = utils.string.escapeshellarg; +import definitions from "./schema.json" assert { type: "json" }; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; // Action -module.exports = { +export default { handler: async function({config}) { const command = [ 'logout', diff --git a/packages/docker/lib/logout/schema.json b/packages/docker/lib/logout/schema.json index fb9816b80..685326e69 100644 --- a/packages/docker/lib/logout/schema.json +++ b/packages/docker/lib/logout/schema.json @@ -3,7 +3,7 @@ "type": "object", 
"properties": { "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "registry": { "type": "string", diff --git a/packages/docker/lib/pause/index.js b/packages/docker/lib/pause/index.js index 323b393a8..09805d388 100644 --- a/packages/docker/lib/pause/index.js +++ b/packages/docker/lib/pause/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.docker.tools.execute({ command: `pause ${config.container}` diff --git a/packages/docker/lib/pause/schema.json b/packages/docker/lib/pause/schema.json index e1faa3575..34e26641e 100644 --- a/packages/docker/lib/pause/schema.json +++ b/packages/docker/lib/pause/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" } }, "required": [ diff --git a/packages/docker/lib/pull/index.js b/packages/docker/lib/pull/index.js index 37d0a0792..f43b3a6c6 100644 --- a/packages/docker/lib/pull/index.js +++ b/packages/docker/lib/pull/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { // Validate const [name, tag] = config.image.split(":"); diff --git a/packages/docker/lib/pull/schema.json b/packages/docker/lib/pull/schema.json index 066825cce..684d87237 100644 --- a/packages/docker/lib/pull/schema.json +++ b/packages/docker/lib/pull/schema.json @@ -8,7 +8,7 @@ "description": "Pull all tagged images in the repository." 
}, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "image": { "type": "string", diff --git a/packages/docker/lib/register.js b/packages/docker/lib/register.js index 9b6225973..2d63974e1 100644 --- a/packages/docker/lib/register.js +++ b/packages/docker/lib/register.js @@ -1,48 +1,42 @@ // Dependencies -require('@nikitajs/file/lib/register'); -const registry = require('@nikitajs/core/lib/registry'); +import '@nikitajs/file/register'; +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { docker: { - build: '@nikitajs/docker/lib/build', + build: '@nikitajs/docker/build', compose: { - '': '@nikitajs/docker/lib/compose', - up: '@nikitajs/docker/lib/compose' + '': '@nikitajs/docker/compose', + up: '@nikitajs/docker/compose' }, - cp: '@nikitajs/docker/lib/cp', - exec: '@nikitajs/docker/lib/exec', - inspect: '@nikitajs/docker/lib/inspect', - kill: '@nikitajs/docker/lib/kill', - load: '@nikitajs/docker/lib/load', - pause: '@nikitajs/docker/lib/pause', - pull: '@nikitajs/docker/lib/pull', - restart: '@nikitajs/docker/lib/restart', - rm: '@nikitajs/docker/lib/rm', - rmi: '@nikitajs/docker/lib/rmi', - run: '@nikitajs/docker/lib/run', - save: '@nikitajs/docker/lib/save', - start: '@nikitajs/docker/lib/start', - stop: '@nikitajs/docker/lib/stop', + cp: '@nikitajs/docker/cp', + exec: '@nikitajs/docker/exec', + images: '@nikitajs/docker/images', + inspect: '@nikitajs/docker/inspect', + kill: '@nikitajs/docker/kill', + load: '@nikitajs/docker/load', + pause: '@nikitajs/docker/pause', + pull: '@nikitajs/docker/pull', + restart: '@nikitajs/docker/restart', + rm: '@nikitajs/docker/rm', + rmi: '@nikitajs/docker/rmi', + run: '@nikitajs/docker/run', + save: '@nikitajs/docker/save', + start: '@nikitajs/docker/start', + stop: '@nikitajs/docker/stop', tools: { - checksum: '@nikitajs/docker/lib/tools/checksum', - execute: '@nikitajs/docker/lib/tools/execute', - service: '@nikitajs/docker/lib/tools/service', - status: '@nikitajs/docker/lib/tools/status' + checksum: '@nikitajs/docker/tools/checksum', + execute: '@nikitajs/docker/tools/execute', + service: '@nikitajs/docker/tools/service', + status: '@nikitajs/docker/tools/status' }, - // unpause: '@nikitajs/docker/lib/unpause' - volume_create: '@nikitajs/docker/lib/volume_create', - volume_rm: '@nikitajs/docker/lib/volume_rm', - wait: '@nikitajs/docker/lib/wait' + // unpause: '@nikitajs/docker/unpause' + volume_create: '@nikitajs/docker/volume_create', + volume_rm: '@nikitajs/docker/volume_rm', + wait: '@nikitajs/docker/wait' } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/docker/lib/restart/index.js b/packages/docker/lib/restart/index.js index dc506a881..aafde8803 100644 --- a/packages/docker/lib/restart/index.js +++ b/packages/docker/lib/restart/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.docker.tools.execute({ command: ['restart', config.timeout != null ? 
`-t ${config.timeout}` : void 0, `${config.container}`].join(' ') diff --git a/packages/docker/lib/restart/schema.json b/packages/docker/lib/restart/schema.json index a65c405ec..8f74b9019 100644 --- a/packages/docker/lib/restart/schema.json +++ b/packages/docker/lib/restart/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "timeout": { "type": "integer", diff --git a/packages/docker/lib/rm/index.js b/packages/docker/lib/rm/index.js index 3d93b7a44..fcceeb2c3 100644 --- a/packages/docker/lib/rm/index.js +++ b/packages/docker/lib/rm/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const { $status: exists, diff --git a/packages/docker/lib/rm/schema.json b/packages/docker/lib/rm/schema.json index 9ef00b371..53714f414 100644 --- a/packages/docker/lib/rm/schema.json +++ b/packages/docker/lib/rm/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "link": { "type": "boolean", diff --git a/packages/docker/lib/rmi/index.js b/packages/docker/lib/rmi/index.js index ce957359f..0ffcb2da0 100644 --- a/packages/docker/lib/rmi/index.js +++ b/packages/docker/lib/rmi/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { const {$status} = await this.docker.tools.execute({ command: [ diff --git a/packages/docker/lib/rmi/schema.json b/packages/docker/lib/rmi/schema.json index 346cc13e9..509c57d1c 100644 --- a/packages/docker/lib/rmi/schema.json +++ b/packages/docker/lib/rmi/schema.json @@ -7,7 +7,7 @@ "description": "Change the build working directory." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "image": { "type": "string", diff --git a/packages/docker/lib/run/index.js b/packages/docker/lib/run/index.js index 3d3dc4c82..7f332c676 100644 --- a/packages/docker/lib/run/index.js +++ b/packages/docker/lib/run/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require("../utils"); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/docker/utils"; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (!(config.name != null || config.rm)) { log({ diff --git a/packages/docker/lib/run/schema.json b/packages/docker/lib/run/schema.json index 3081f8fa3..ebdc1465c 100644 --- a/packages/docker/lib/run/schema.json +++ b/packages/docker/lib/run/schema.json @@ -73,7 +73,7 @@ "description": "Set custom DNS search domain(s)." 
}, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "entrypoint": { "type": "string", diff --git a/packages/docker/lib/save/index.js b/packages/docker/lib/save/index.js index 984a2a0a7..b340ae307 100644 --- a/packages/docker/lib/save/index.js +++ b/packages/docker/lib/save/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} diff --git a/packages/docker/lib/save/schema.json b/packages/docker/lib/save/schema.json index b6ab5a930..120cb0b90 100644 --- a/packages/docker/lib/save/schema.json +++ b/packages/docker/lib/save/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "image": { "type": "string", diff --git a/packages/docker/lib/start/index.js b/packages/docker/lib/start/index.js index 57ba748e5..cd6d39a73 100644 --- a/packages/docker/lib/start/index.js +++ b/packages/docker/lib/start/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} @@ -15,13 +15,11 @@ module.exports = { log({ message: `Container already started ${config.container} (Skipping)`, level: 'INFO', - module: 'nikita/lib/docker/start' }); } else { log({ message: `Starting container ${config.container}`, level: 'INFO', - module: 'nikita/lib/docker/start' }); } await this.docker.tools.execute({ diff --git a/packages/docker/lib/start/schema.json b/packages/docker/lib/start/schema.json index 49b35f3e0..400efe7c2 100644 --- a/packages/docker/lib/start/schema.json +++ b/packages/docker/lib/start/schema.json @@ -12,7 +12,7 @@ "description": "Name/ID of the container, required." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" } }, "required": [ diff --git a/packages/docker/lib/stop/index.js b/packages/docker/lib/stop/index.js index 33850323d..286ba492b 100644 --- a/packages/docker/lib/stop/index.js +++ b/packages/docker/lib/stop/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // rm is false by default only if config.service is true const { $status } = await this.docker.tools.status(config, { @@ -12,13 +12,11 @@ module.exports = { log({ message: `Stopping container ${config.container}`, level: "INFO", - module: "nikita/lib/docker/stop", }); } else { log({ message: `Container already stopped ${config.container} (Skipping)`, level: "INFO", - module: "nikita/lib/docker/stop", }); } await this.docker.tools.execute({ diff --git a/packages/docker/lib/stop/schema.json b/packages/docker/lib/stop/schema.json index 21a575c6d..5f52cf46c 100644 --- a/packages/docker/lib/stop/schema.json +++ b/packages/docker/lib/stop/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container." 
}, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "timeout": { "type": "integer", diff --git a/packages/docker/lib/tools/checksum/index.js b/packages/docker/lib/tools/checksum/index.js index 157c0819f..52719983f 100644 --- a/packages/docker/lib/tools/checksum/index.js +++ b/packages/docker/lib/tools/checksum/index.js @@ -1,32 +1,23 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} }) { - log({ - message: `Getting image checksum :${config.image}`, - level: 'DEBUG' - }); + log('DEBUG', `Getting image checksum :${config.image}`); // Run `docker images` with the following config: // - `--no-trunc`: display full checksum // - `--quiet`: discard headers const {$status, stdout} = await this.docker.tools.execute({ - boot2docker: config.boot2docker, command: `images --no-trunc --quiet ${config.image}:${config.tag}`, - compose: config.compose, - machine: config.machine }); const checksum = stdout === '' ? undefined : stdout.toString().trim(); if ($status) { - log({ - message: `Image checksum for ${config.image}: ${checksum}`, - level: 'INFO' - }); + log('INFO', `Image checksum for ${config.image}: ${checksum}`); } return { $status: $status, diff --git a/packages/docker/lib/tools/checksum/schema.json b/packages/docker/lib/tools/checksum/schema.json index 72a5ae16b..7f8750c58 100644 --- a/packages/docker/lib/tools/checksum/schema.json +++ b/packages/docker/lib/tools/checksum/schema.json @@ -7,7 +7,7 @@ "description": "Change the build working directory." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "image": { "type": "string", diff --git a/packages/docker/lib/tools/execute/index.js b/packages/docker/lib/tools/execute/index.js index e7fd4db60..9f7366262 100644 --- a/packages/docker/lib/tools/execute/index.js +++ b/packages/docker/lib/tools/execute/index.js @@ -1,12 +1,12 @@ // Dependencies -const dedent = require("dedent"); -const utils = require("../../utils"); -const definitions = require("./schema.json"); -const esa = utils.string.escapeshellarg; +import dedent from "dedent"; +import utils from "@nikitajs/docker/utils"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { find } }) { // Build Docker config.opts = Object.keys(config.opts) diff --git a/packages/docker/lib/tools/execute/schema.json b/packages/docker/lib/tools/execute/schema.json index 01f1f1ac1..e00ec9480 100644 --- a/packages/docker/lib/tools/execute/schema.json +++ b/packages/docker/lib/tools/execute/schema.json @@ -30,14 +30,14 @@ "description": "Current working directory from where to execute the command." 
}, "code": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/code", + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/code", "default": {} }, "docker": { "$ref": "#/definitions/docker" }, "format": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/format" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/format" } } } diff --git a/packages/docker/lib/tools/service/index.js b/packages/docker/lib/tools/service/index.js index a757ada3f..218da1cd6 100644 --- a/packages/docker/lib/tools/service/index.js +++ b/packages/docker/lib/tools/service/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { // Normalization if (config.detach == null) { diff --git a/packages/docker/lib/tools/service/schema.json b/packages/docker/lib/tools/service/schema.json index 4a510dc16..25b5e63ca 100644 --- a/packages/docker/lib/tools/service/schema.json +++ b/packages/docker/lib/tools/service/schema.json @@ -13,7 +13,7 @@ } }, { - "$ref": "module://@nikitajs/docker/lib/run" + "$ref": "module://@nikitajs/docker/run" } ], "required": [ diff --git a/packages/docker/lib/tools/status/index.js b/packages/docker/lib/tools/status/index.js index 41ec58531..b4127aba1 100644 --- a/packages/docker/lib/tools/status/index.js +++ b/packages/docker/lib/tools/status/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { // Construct exec command await this.docker.tools.execute({ diff --git a/packages/docker/lib/tools/status/schema.json b/packages/docker/lib/tools/status/schema.json index 28bb82bf9..3dca9df53 100644 --- a/packages/docker/lib/tools/status/schema.json +++ b/packages/docker/lib/tools/status/schema.json @@ -10,7 +10,7 @@ "description": "Name or Id of the container." 
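Since these `$ref`s now resolve to `module://@nikitajs/core/actions/execute#...`, `docker.tools.execute` keeps inheriting the core `execute` configuration such as `code` and `format`. A minimal caller-side sketch, assuming a reachable Docker daemon:

```js
import "@nikitajs/docker/register";
import nikita from "@nikitajs/core";

// `command` is everything after the `docker` executable; `code` follows the
// semantics of the core execute action.
const { $status, stdout } = await nikita.docker.tools.execute({
  command: "version",
  code: [0],
});
console.info($status, stdout);
```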
}, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" } }, "required": [ diff --git a/packages/docker/lib/unpause/index.js b/packages/docker/lib/unpause/index.js index 6903e879d..27f3e2290 100644 --- a/packages/docker/lib/unpause/index.js +++ b/packages/docker/lib/unpause/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: function({config}) { this.docker.tools.execute({ command: `unpause ${config.container}` diff --git a/packages/docker/lib/unpause/schema.json b/packages/docker/lib/unpause/schema.json index 63bc806d6..141144443 100644 --- a/packages/docker/lib/unpause/schema.json +++ b/packages/docker/lib/unpause/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container" }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" } }, "required": [ diff --git a/packages/docker/lib/utils/docker.js b/packages/docker/lib/utils/docker.js new file mode 100644 index 000000000..19c44602e --- /dev/null +++ b/packages/docker/lib/utils/docker.js @@ -0,0 +1,132 @@ +import dedent from 'dedent'; + +const options = [ + "api-cors-header", + "bridge", + "bip", + "debug", + "daemon", + "default-gateway", + "default-gateway-v6", + "default-ulimit", + "dns", + "dns-search", + "exec-driver", + "exec-opt", + "exec-root", + "fixed-cidr", + "fixed-cidr-v6", + "group", + "graph", + "host", + "help", + "icc", + "insecure-registry", + "ip", + "ip-forward", + "ip-masq", + "iptables", + "ipv6", + "log-level", + "label", + "log-driver", + "log-opt", + "mtu", + "pidfile", + "registry-mirror", + "storage-driver", + "selinux-enabled", + "storage-opt", + "tls", + "tlscacert", + "tlscert", + "tlskey", + "tlsverify", + "userland-proxy", + "version", +]; + +const compose_options = [ + "file", + "project-name", + "verbose", + "no-ansi", + "version", + "host", + // TLS + "tls", + "tlscacert", + "tlscert", + "tlskey", + "tlsverify", + "skip-hostname-check", + "project-directory", +]; + +const opts = function (config) { + const opts = (function () { + const results = []; + for (const option of (!config.compose ? options : compose_options)) { + let value = config[option]; + if (value == null) { + continue; + } + if (value === true) { + value = "true"; + } + if (value === false) { + value = "false"; + } + if (option === "tlsverify") { + results.push(`--${option}`); + } else { + results.push(`--${option}=${value}`); + } + } + return results; + })(); + return opts.join(" "); }; + +/* +Build the docker command +Accepted options are referenced in the `options` property. Also accepts +"machine" and "boot2docker". +The `compose` option wraps the command for docker-compose instead of docker +*/ +const wrap = function (config, command) { + const options = opts(config); + const exe = config.compose ? "bin_compose" : "bin_docker"; + return dedent` + export SHELL=/bin/bash + export PATH=/opt/local/bin/:/opt/local/sbin/:/usr/local/bin/:/usr/local/sbin/:$PATH + bin_boot2docker=$(command -v boot2docker) + bin_docker=$(command -v docker) + bin_machine=$(command -v docker-machine) + bin_compose=$(command -v docker-compose) + machine='${config.machine || ""}' + boot2docker='${config.boot2docker ?
"1" : ""}' + docker='' + if [[ $machine != '' ]] && [ $bin_machine ]; then + if [ -z "${config.machine || ""}" ]; then exit 5; fi + if docker-machine status "\${machine}" | egrep 'Stopped|Saved'; then + docker-machine start "\${machine}"; + fi + #docker="eval \\$(\\\${bin_machine} env \${machine}) && $${exe}" + eval "$(\${bin_machine} env \${machine})" + elif [[ $boot2docker != '1' ]] && [ $bin_boot2docker ]; then + #docker="eval \\$(\\\${bin_boot2docker} shellinit) && $${exe}" + eval "$(\${bin_boot2docker} shellinit)" + fi + $${exe} ${options} ${command} + `; +}; + +export { options, compose_options, opts, wrap }; + +export default { + options: options, + compose_options: compose_options, + opts: opts, + wrap: wrap, +}; diff --git a/packages/docker/lib/utils/index.js b/packages/docker/lib/utils/index.js index 5b2b2a0bc..ab5c376bd 100644 --- a/packages/docker/lib/utils/index.js +++ b/packages/docker/lib/utils/index.js @@ -1,106 +1,7 @@ -// Generated by CoffeeScript 2.7.0 -var utils; +import utils from "@nikitajs/core/utils"; +import * as docker from './docker.js'; -utils = require('@nikitajs/core/lib/utils'); - -module.exports = { +export default { ...utils, - options: ['api-cors-header', 'bridge', 'bip', 'debug', 'daemon', 'default-gateway', 'default-gateway-v6', 'default-ulimit', 'dns', 'dns-search', 'exec-driver', 'exec-opt', 'exec-root', 'fixed-cidr', 'fixed-cidr-v6', 'group', 'graph', 'host', 'help', 'icc', 'insecure-registry', 'ip', 'ip-forward', 'ip-masq', 'iptables', 'ipv6', 'log-level', 'label', 'log-driver', 'log-opt', 'mtu', 'pidfile', 'registry-mirror', 'storage-driver', 'selinux-enabled', 'storage-opt', 'tls', 'tlscacert', 'tlscert', 'tlskey', 'tlsverify', 'userland-proxy', 'version'], - compose_options: [ - 'file', - 'project-name', - 'verbose', - 'no-ansi', - 'version', - 'host', - // TLS - 'tls', - 'tlscacert', - 'tlscert', - 'tlskey', - 'tlsverify', - 'skip-hostname-check', - 'project-directory' - ], - opts: function(config) { - var option, opts, value; - opts = (function() { - var i, len, ref, results; - ref = module.exports[!config.compose ? 'options' : 'compose_options']; - results = []; - for (i = 0, len = ref.length; i < len; i++) { - option = ref[i]; - value = config[option]; - if (value == null) { - continue; - } - if (value === true) { - value = 'true'; - } - if (value === false) { - value = 'false'; - } - if (option === 'tlsverify') { - results.push(`--${option}`); - } else { - results.push(`--${option}=${value}`); - } - } - return results; - })(); - return opts.join(' '); - }, - /* - Build the docker command - Accepted options are referenced in "module.exports.options". Also accept - "machine" and "boot2docker". - `compose` option allow to wrap the command for docker-compose instead of docker - */ - wrap: function(config, command) { - var docker, exe, opts; - docker = {}; - opts = module.exports.opts(config); - exe = config.compose ? 'bin_compose' : 'bin_docker'; - return `export SHELL=/bin/bash -export PATH=/opt/local/bin/:/opt/local/sbin/:/usr/local/bin/:/usr/local/sbin/:$PATH -bin_boot2docker=$(command -v boot2docker) -bin_docker=$(command -v docker) -bin_machine=$(command -v docker-machine) -bin_compose=$(command -v docker-compose) -machine='${config.machine || ''}' -boot2docker='${config.boot2docker ? 
'1' : ''}' -docker='' -if [[ $machine != '' ]] && [ $bin_machine ]; then - if [ -z "${config.machine || ''}" ]; then exit 5; fi - if docker-machine status "\${machine}" | egrep 'Stopped|Saved'; then - docker-machine start "\${machine}"; - fi - #docker="eval \\$(\\\${bin_machine} env \${machine}) && $${exe}" - eval "$(\${bin_machine} env \${machine})" -elif [[ $boot2docker != '1' ]] && [ $bin_boot2docker ]; then - #docker="eval \\$(\\\${bin_boot2docker} shellinit) && $${exe}" - eval "$(\${bin_boot2docker} shellinit)" -fi -$${exe} ${opts} ${command}`; - } + ...docker, }; - -// wrap_schema: -// 'boot2docker': -// type: 'boolean' -// default: false -// description: ''' -// Whether to use boot2docker or not. -// ''' -// 'compose': -// type: 'boolean' -// description: ''' -// Use the `docker-compose` command instead of `docker`. -// ''' -// 'machine': -// type: 'string' -// description: ''' -// Name of the docker-machine, required if using docker-machine. -// ''' -// Reformat error message if any -// TODO: rename this function as format_error diff --git a/packages/docker/lib/volume_create/index.js b/packages/docker/lib/volume_create/index.js index 423d6af8a..4c53f1e07 100644 --- a/packages/docker/lib/volume_create/index.js +++ b/packages/docker/lib/volume_create/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.docker.tools.execute({ $if: config.name, diff --git a/packages/docker/lib/volume_create/schema.json b/packages/docker/lib/volume_create/schema.json index e9b8dbc9a..3d55767e1 100644 --- a/packages/docker/lib/volume_create/schema.json +++ b/packages/docker/lib/volume_create/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "driver": { "type": "string", diff --git a/packages/docker/lib/volume_rm/index.js b/packages/docker/lib/volume_rm/index.js index 3046da629..c5548810e 100644 --- a/packages/docker/lib/volume_rm/index.js +++ b/packages/docker/lib/volume_rm/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.docker.tools.execute({ command: `volume rm ${config.name}`, diff --git a/packages/docker/lib/volume_rm/schema.json b/packages/docker/lib/volume_rm/schema.json index b0921ed26..d8a07af22 100644 --- a/packages/docker/lib/volume_rm/schema.json +++ b/packages/docker/lib/volume_rm/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" }, "name": { "type": "string", diff --git a/packages/docker/lib/wait/index.js b/packages/docker/lib/wait/index.js index 2ef200103..588087b50 100644 --- a/packages/docker/lib/wait/index.js +++ b/packages/docker/lib/wait/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Old implementation was `wait {container} | read r; return $r` 
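The rewritten `utils/index.js` above is now a thin merge of the core utils with the named helpers from the new `docker.js`, and the `@nikitajs/docker/utils` specifier resolves through the `exports` map added to `package.json` later in this patch. A small sketch of consuming the re-exported helpers; the configuration values are made up:

```js
import utils from "@nikitajs/docker/utils";

// Build the global flag string; only keys listed in `options` (or in
// `compose_options` when `compose` is true) are retained.
const flags = utils.opts({ host: "tcp://127.0.0.1:2375", tlsverify: true });
// Roughly "--host=tcp://127.0.0.1:2375 --tlsverify"

// Wrap a docker subcommand into the shell snippet that resolves the binary
// and the docker-machine or boot2docker environment before running it.
const script = utils.wrap({}, "ps --all");

console.info(flags);
console.info(script);
```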
await this.docker.tools.execute(`wait ${config.container}`); diff --git a/packages/docker/lib/wait/schema.json b/packages/docker/lib/wait/schema.json index e1faa3575..34e26641e 100644 --- a/packages/docker/lib/wait/schema.json +++ b/packages/docker/lib/wait/schema.json @@ -7,7 +7,7 @@ "description": "Name/ID of the container." }, "docker": { - "$ref": "module://@nikitajs/docker/lib/tools/execute#/definitions/docker" + "$ref": "module://@nikitajs/docker/tools/execute#/definitions/docker" } }, "required": [ diff --git a/packages/docker/package.json b/packages/docker/package.json index 7eaa4a7b6..ea18c99f2 100644 --- a/packages/docker/package.json +++ b/packages/docker/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/docker", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various Docker operations.", "keywords": [ "nikita", @@ -13,7 +14,6 @@ "system", "task" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -58,21 +58,28 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + ".": "./lib/index.js", + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/docker/lib/register", - "@nikitajs/network/lib/register" - ], "inline-diffs": true, - "timeout": 20000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/docker/register", + "@nikitajs/network/register", + "should" + ], + "throw-deprecation": true, + "timeout": 20000 }, "publishConfig": { "access": "public" @@ -88,5 +95,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/docker/test.sample.coffee b/packages/docker/test.sample.coffee index 127d464c9..ceb5550e8 100644 --- a/packages/docker/test.sample.coffee +++ b/packages/docker/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: docker: false # disable_docker docker_volume: false @@ -15,5 +15,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/docker/test/build.coffee b/packages/docker/test/build.coffee index 7f4524701..5a2f9982a 100644 --- a/packages/docker/test/build.coffee +++ b/packages/docker/test/build.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.build', -> + return unless test.tags.docker @timeout 60000 @@ -14,7 +14,7 @@ describe 'docker.build', -> they 'fail with missing image parameter', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker .docker.build false_source: 'Dockerfile' .should.be.rejectedWith @@ -28,10 +28,10 @@ describe 'docker.build', -> they 'fail with exclusive parameters', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker .docker.build image: 'nikita/should_not_exists_1' - file: 
"#{__dirname}/Dockerfile" + file: "/a_dir/Dockerfile" content: "FROM scratch \ CMD ['echo \"hello world\"']" .should.be.rejectedWith code: 'NIKITA_DOCKER_BUILD_CONTENT_FILE_REQUIRED' @@ -43,62 +43,65 @@ describe 'docker.build', -> they 'from text', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rmi 'nikita/should_exists_1' - {$status, image, stdout} = await @docker.build + await @docker.rmi 'nikita/should_exists_1' + {$status, image_id, stdout, stderr} = await @docker.build image: 'nikita/should_exists_1' content: """ FROM scratch CMD echo hello 1 """ $status.should.be.true() - image.should.match /^\w{12}$/ - stdout.should.containEql 'Step 2/2 : CMD echo hello' - @docker.rmi 'nikita/should_exists_1' + image_id.should.match /^\w{12}$/ + stdout.should.be.a.String() + stderr.should.be.a.String() + await @docker.rmi 'nikita/should_exists_1' they 'from cwd', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rmi 'nikita/should_exists_2' - @file + await @docker.rmi 'nikita/should_exists_2' + await @file target: "#{tmpdir}/Dockerfile" content: """ FROM scratch CMD echo hello 2 """ - {$status} = await @docker.build + {$status, image_id} = await @docker.build image: 'nikita/should_exists_2' cwd: tmpdir $status.should.be.true() - @docker.rmi 'nikita/should_exists_2' + image_id.should.match /^\w{12}$/ + await @docker.rmi 'nikita/should_exists_2' they 'from Dockerfile (exist)', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rmi 'nikita/should_exists_3' - @file + await @docker.rmi 'nikita/should_exists_3' + await @file content: """ FROM scratch CMD ['echo "hello build from Dockerfile #{Date.now()}"'] """ target: "#{tmpdir}/nikita_Dockerfile" - {$status} = await @docker.build + {$status, image_id} = await @docker.build image: 'nikita/should_exists_3' file: "#{tmpdir}/nikita_Dockerfile" $status.should.be.true() - @docker.rmi 'nikita/should_exists_3' + image_id.should.match /^\w{12}$/ + await @docker.rmi 'nikita/should_exists_3' they 'from Dockerfile (not exist)', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.build @@ -110,17 +113,17 @@ describe 'docker.build', -> they 'status not modified', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rmi 'nikita/should_exists_5' - @file + await @docker.rmi 'nikita/should_exists_5' + await @file target: "#{tmpdir}/nikita_Dockerfile" content: """ FROM scratch CMD echo hello 5 """ - {$logs: logs_status_true, $status, stdout} = await @docker.build + {$logs: logs_status_true, $status} = await @docker.build image: 'nikita/should_exists_5' file: "#{tmpdir}/nikita_Dockerfile" $status.should.be.true() @@ -128,7 +131,7 @@ describe 'docker.build', -> image: 'nikita/should_exists_5' file: "#{tmpdir}/nikita_Dockerfile" $status.should.be.false() - @docker.rmi 'nikita/should_exists_5' - @call -> + await @docker.rmi 'nikita/should_exists_5' + await @call -> logs_status_true.filter( (s) -> /^New image id/.test s?.message ).length.should.eql 1 logs_status_false.filter( (s) -> /^Identical image id/.test s?.message ).length.should.eql 1 diff --git a/packages/docker/test/compose/index.coffee b/packages/docker/test/compose/index.coffee index 322eec1ac..be5d77966 100644 --- a/packages/docker/test/compose/index.coffee +++ b/packages/docker/test/compose/index.coffee @@ -1,18 
+1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require '../test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.compose', -> + return unless test.tags.docker @timeout 90000 they 'up from content', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> await @docker.rm container: 'nikita_docker_compose_up_content' @@ -43,7 +43,7 @@ describe 'docker.compose', -> they 'up from content to file', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.rm @@ -76,7 +76,7 @@ describe 'docker.compose', -> they 'up from file', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.rm @@ -110,7 +110,7 @@ describe 'docker.compose', -> they 'up with service name', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.rm @@ -145,7 +145,7 @@ describe 'docker.compose', -> they 'status not modified', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.rm diff --git a/packages/docker/test/cp.coffee b/packages/docker/test/cp.coffee index 9c8bb9441..7778a42ee 100644 --- a/packages/docker/test/cp.coffee +++ b/packages/docker/test/cp.coffee @@ -1,24 +1,24 @@ -nikita = require '@nikitajs/core/lib' -path = require 'path' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import path from 'node:path' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.cp', -> + return unless test.tags.docker @timeout 20000 they 'a remote file to a local file', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rm + await @docker.rm container: 'nikita_extract' - @docker.run + await @docker.run name: 'nikita_extract' image: 'alpine' command: "whoami" @@ -27,19 +27,19 @@ describe 'docker.cp', -> source: 'nikita_extract:/etc/apk/repositories' target: "#{tmpdir}/a_file" $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" - @docker.rm + await @docker.rm container: 'nikita_extract' they 'a remote file to a local directory', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rm container: 'nikita_extract' - @docker.run + await @docker.rm container: 'nikita_extract' + await @docker.run name: 'nikita_extract' image: 'alpine' command: "whoami" @@ -48,54 +48,62 @@ describe 'docker.cp', -> source: 'nikita_extract:/etc/apk/repositories' target: "#{tmpdir}" $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/repositories" - @docker.rm container: 'nikita_extract' + await @docker.rm container: 'nikita_extract' they 'a local file to a remote file', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rm container: 'nikita_extract' - @docker.run + await @docker.rm container: 'nikita_extract' + await @file + content: "Hello" + target: "#{tmpdir}/source/a_file" + await @fs.mkdir "#{tmpdir}/target" + await @docker.run name: 'nikita_extract' image: 
'alpine' volume: "#{tmpdir}:/root" command: "whoami" rm: false {$status} = await @docker.cp - source: "#{__filename}" + source: "#{tmpdir}/source/a_file" target: "nikita_extract:/root/a_file" $status.should.be.true() - @docker.cp + await @docker.cp source: 'nikita_extract:/root/a_file' - target: "#{tmpdir}" - @fs.assert - target: "#{tmpdir}/a_file" - @docker.rm container: 'nikita_extract' + target: "#{tmpdir}/target" + await @fs.assert + target: "#{tmpdir}/target/a_file" + await @docker.rm container: 'nikita_extract' they 'a local file to a remote directory', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.rm container: 'nikita_extract' - @docker.run + await @docker.rm container: 'nikita_extract' + await @file + content: "Hello" + target: "#{tmpdir}/source/a_file" + await @fs.mkdir "#{tmpdir}/target" + await @docker.run name: 'nikita_extract' image: 'alpine' volume: "#{tmpdir}:/root" command: "whoami" rm: false {$status} = await @docker.cp - source: "#{__filename}" + source: "#{tmpdir}/source/a_file" target: "nikita_extract:/root" $status.should.be.true() - @docker.cp - source: "nikita_extract:/root/#{path.basename __filename}" - target: "#{tmpdir}" - @fs.assert - target: "#{tmpdir}/#{path.basename __filename}" - @docker.rm container: 'nikita_extract' + await @docker.cp + source: "nikita_extract:/root/a_file" + target: "#{tmpdir}/target" + await @fs.assert + target: "#{tmpdir}/target/a_file" + await @docker.rm container: 'nikita_extract' diff --git a/packages/docker/test/exec.coffee b/packages/docker/test/exec.coffee index 4cbaf9e46..0e84c176d 100644 --- a/packages/docker/test/exec.coffee +++ b/packages/docker/test/exec.coffee @@ -1,16 +1,16 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.exec', -> + return unless test.tags.docker they 'simple command', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> await @docker.rm container: 'nikita_test_exec' @@ -30,16 +30,16 @@ describe 'docker.exec', -> they 'on stopped container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> try - @docker.rm + await @docker.rm container: 'nikita_test_exec' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_exec' - @docker.stop + await @docker.stop container: 'nikita_test_exec' await @docker.exec container: 'nikita_test_exec' @@ -55,22 +55,22 @@ describe 'docker.exec', -> they 'on non existing container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.exec + await @docker.exec container: 'nikita_fake_container' command: 'echo toto' - .should.be.rejectedWith 'Error: No such container: nikita_fake_container' + .should.be.rejectedWith /No such container: nikita_fake_container/ they 'skip exit code', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_exec' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_exec' {$status} = await @docker.exec @@ -78,6 +78,6 @@ describe 'docker.exec', -> command: 'toto' code: [0, 126] $status.should.be.false() - @docker.rm + await @docker.rm container: 'nikita_test_exec' force: true diff --git 
a/packages/docker/test/images.coffee b/packages/docker/test/images.coffee new file mode 100644 index 000000000..5dbd9cabf --- /dev/null +++ b/packages/docker/test/images.coffee @@ -0,0 +1,80 @@ + +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) + +describe 'docker.images', -> + return unless test.tags.docker + + they 'all images', ({ssh}) -> + nikita + $ssh: ssh + docker: test.docker + , -> + await @docker.build + image: 'nikita/images_all' + tag: 'latest' + content: "FROM alpine\nCMD ['echo hello']" + {images, count} = await @docker.images() + images.filter( ({Repository}) -> Repository is 'nikita/images_all').should.match [ + Containers: 'N/A', + CreatedAt: /\d{4}-\d{2}-\d{2} [\d]{2}:[\d]{2}:[\d]{2} \+0000 UTC/ + CreatedSince: /\w*/, + Digest: '', + ID: /\w{12}/, + Repository: 'nikita/images_all', + SharedSize: 'N/A', + Size: /[\d\.]+MB/, + Tag: 'latest', + UniqueSize: 'N/A', + VirtualSize: /[\d\.]+MB/ + ] + count.should.be.a.Number() + await @docker.rmi 'nikita/images_all' + + they 'filter dangling `true`', ({ssh}) -> + nikita + $ssh: ssh + docker: test.docker + , -> + await @docker.build + image: 'nikita/images_dangling_true' + tag: 'latest' + content: "FROM alpine\nLABEL nikita=dangling_true\nCMD ['echo 1']" + await @docker.build + image: 'nikita/images_dangling_true' + tag: 'latest' + content: "FROM alpine\nLABEL nikita=dangling_true\nCMD ['echo 2']" + {images, count} = await @docker.images + filters: + label: 'nikita=dangling_true' + dangling: true + images.should.match [ + Repository: '', + Tag: '', + ] + await @docker.rmi 'nikita/images_dangling_true' + + they 'filter dangling `false`', ({ssh}) -> + nikita + $ssh: ssh + docker: test.docker + , -> + await @docker.build + image: 'nikita/images_dangling_false' + tag: 'latest' + content: "FROM alpine\nLABEL nikita=dangling_false\nCMD ['echo 1']" + await @docker.build + image: 'nikita/images_dangling_false' + tag: 'latest' + content: "FROM alpine\nLABEL nikita=dangling_false\nCMD ['echo 2']" + {images, count} = await @docker.images + filters: + label: 'nikita=dangling_false' + dangling: false + images.should.match [ + Repository: 'nikita/images_dangling_false', + Tag: 'latest', + ] + await @docker.rmi 'nikita/images_dangling_false' diff --git a/packages/docker/test/inspect.coffee b/packages/docker/test/inspect.coffee index ff0ca83bc..a894ee9bb 100644 --- a/packages/docker/test/inspect.coffee +++ b/packages/docker/test/inspect.coffee @@ -1,41 +1,41 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.inspect', -> + return unless test.tags.docker they 'one running container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_inspect' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_inspect' {info} = await @docker.inspect container: 'nikita_test_inspect' info.Name.should.eql '/nikita_test_inspect' - @docker.rm + await @docker.rm container: 'nikita_test_inspect' force: true they 'two running containers', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm [ + await @docker.rm [ container: 'nikita_test_inspect_1' , container: 
'nikita_test_inspect_2' ], force: true - @docker.tools.service [ + await @docker.tools.service [ container: 'nikita_test_inspect_1' , container: 'nikita_test_inspect_2' @@ -50,7 +50,7 @@ describe 'docker.inspect', -> '/nikita_test_inspect_1' '/nikita_test_inspect_2' ] - @docker.rm [ + await @docker.rm [ container: 'nikita_test_inspect_1' , container: 'nikita_test_inspect_2' diff --git a/packages/docker/test/kill.coffee b/packages/docker/test/kill.coffee index d28eea29d..3e4f2691a 100644 --- a/packages/docker/test/kill.coffee +++ b/packages/docker/test/kill.coffee @@ -1,21 +1,21 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.kill', -> + return unless test.tags.docker they 'running container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_kill' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' port: '499:80' container: 'nikita_test_kill' @@ -27,16 +27,16 @@ describe 'docker.kill', -> @timeout 120000 nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_kill' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' port: '499:80' container: 'nikita_test_kill' - @docker.kill + await @docker.kill container: 'nikita_test_kill' {$status} = await @docker.kill container: 'nikita_test_kill' @@ -45,11 +45,11 @@ describe 'docker.kill', -> they 'status not modified (not living)', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_kill' - @docker.run + await @docker.run command: "/bin/echo 'test'" image: 'alpine' rm: false @@ -57,5 +57,5 @@ describe 'docker.kill', -> {$status} = await @docker.kill container: 'nikita_test_kill' $status.should.be.false() - @docker.rm + await @docker.rm container: 'nikita_test_kill' diff --git a/packages/docker/test/load.coffee b/packages/docker/test/load.coffee index 5f2f357a1..1e34489ee 100644 --- a/packages/docker/test/load.coffee +++ b/packages/docker/test/load.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.load', -> + return unless test.tags.docker # timestamp ensures that hash of the built image will be unique and # image checksum is also unique @@ -14,7 +14,7 @@ describe 'docker.load', -> @timeout 30000 nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.build @@ -36,10 +36,10 @@ describe 'docker.load', -> they 'not loading if checksum match existing image', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - {image} = await @docker.build + {image_id} = await @docker.build image: 'nikita/load_test' tag: 'latest' content: "FROM alpine\nCMD ['echo \"docker.build #{Date.now()}\"']" @@ -49,14 +49,14 @@ describe 'docker.load', -> output: "#{tmpdir}/nikita_load.tar" {$status} = await @docker.load input: "#{tmpdir}/nikita_load.tar" - checksum: image + 
checksum: image_id $status.should.be.false() they 'status not modified if same image', ({ssh}) -> @timeout 30000 nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> await @docker.rmi diff --git a/packages/docker/test/pull.coffee b/packages/docker/test/pull.coffee index 995fdd80f..13208e024 100644 --- a/packages/docker/test/pull.coffee +++ b/packages/docker/test/pull.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.pull', -> + return unless test.tags.docker they 'pull image', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rmi + await @docker.rmi image: 'alpine' force: true {$status} = await @docker.pull @@ -22,12 +22,12 @@ describe 'docker.pull', -> they '$status not modified if same image', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rmi + await @docker.rmi image: 'alpine' force: true - @docker.pull + await @docker.pull image: 'alpine' {$status} = await @docker.pull image: 'alpine' @@ -36,9 +36,9 @@ describe 'docker.pull', -> they 'pull specific image tag', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rmi + await @docker.rmi image: 'alpine' tag: 'edge' force: true @@ -55,9 +55,9 @@ describe 'docker.pull', -> # we need to find an image with a few tags nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rmi + await @docker.rmi image: 'alpine' force: true {$status} = await @docker.pull diff --git a/packages/docker/test/rm.coffee b/packages/docker/test/rm.coffee index 94178c1a0..e6005aa67 100644 --- a/packages/docker/test/rm.coffee +++ b/packages/docker/test/rm.coffee @@ -1,22 +1,22 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.rm', -> + return unless test.tags.docker they 'status', ({ssh}) -> @timeout 30000 nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_rm' - @docker.run + await @docker.run command: "/bin/echo 'test'" image: 'alpine' name: 'nikita_rm' @@ -32,13 +32,13 @@ describe 'docker.rm', -> @timeout 30000 nikita $ssh: ssh - docker: docker + docker: test.docker , -> try - @docker.rm + await @docker.rm container: 'nikita_rm' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' port: '499:80' container: 'nikita_rm' @@ -50,16 +50,16 @@ describe 'docker.rm', -> # Container must be stopped to be removed without force err.message.should.match /(You cannot remove a running container)|(Container must be stopped)/ finally - @docker.stop + await @docker.stop container: 'nikita_rm' - @docker.rm + await @docker.rm container: 'nikita_rm' they 'remove live container (with force)', ({ssh}) -> @timeout 30000 nikita $ssh: ssh - docker: docker + docker: test.docker , -> await @docker.rm container: 'nikita_rm' diff --git a/packages/docker/test/rmi.coffee b/packages/docker/test/rmi.coffee index 0062c66a2..07eb439a0 100644 --- a/packages/docker/test/rmi.coffee +++ b/packages/docker/test/rmi.coffee @@ 
-1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.rmi', -> + return unless test.tags.docker they 'remove image', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.build + await @docker.build image: 'nikita/rmi_test' content: "FROM scratch\nCMD ['echo \"hello build from text\"']" {$status} = await @docker.rmi @@ -22,12 +22,12 @@ describe 'docker.rmi', -> they 'status unmodifed', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.build + await @docker.build image: 'nikita/rmi_test:latest' content: "FROM scratch\nCMD ['echo \"hello build from text\"']" - @docker.rmi + await @docker.rmi image: 'nikita/rmi_test' {$status} = await @docker.rmi image: 'nikita/rmi_test' diff --git a/packages/docker/test/run.coffee b/packages/docker/test/run.coffee index a7d6ff7e1..6b19f8d48 100644 --- a/packages/docker/test/run.coffee +++ b/packages/docker/test/run.coffee @@ -1,16 +1,16 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.run', -> + return unless test.tags.docker they 'simple command', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> {$status, stdout} = await @docker.run command: "/bin/echo 'test'" @@ -21,9 +21,9 @@ describe 'docker.run', -> they '--rm (flag option)', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_test_rm' {stdout} = await @docker.run @@ -32,7 +32,7 @@ describe 'docker.run', -> container: 'nikita_test_rm' rm: false stdout.should.match /^test.*/ - @docker.rm + await @docker.rm force: true container: 'nikita_test_rm' @@ -40,51 +40,48 @@ describe 'docker.run', -> @timeout 0 nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_unique' force: true - @docker.run + await @docker.run image: 'httpd' port: '499:80' container: 'nikita_test_unique' detach: true rm: false - @docker.rm + await @docker.rm force: true container: 'nikita_test_unique' they 'array options', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_test_array' - @docker.run + await @docker.run image: 'httpd' port: [ '500:80', '501:81' ] container: 'nikita_test_array' detach: true rm: false - # .wait_connect - # host: ipadress of docker, docker-machine... 
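Besides the import rewrite, the converted tests now `await` every action call instead of relying on implicit sequencing, and most of them assert the same idempotence contract: a first run reports `$status: true`, an identical second run reports `false`. The same contract expressed in plain JavaScript against the pull action, assuming a reachable Docker daemon and network access:

```js
import "@nikitajs/docker/register";
import nikita from "@nikitajs/core";

// Mirrors the assertions of test/pull.coffee.
await nikita.docker.rmi({ image: "alpine", force: true });
const first = await nikita.docker.pull({ image: "alpine" });
const second = await nikita.docker.pull({ image: "alpine" });
console.info(first.$status, second.$status); // expected: true, then false
```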
- # port: 500 - @docker.rm + await @docker.rm force: true container: 'nikita_test_array' they 'existing container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_test' - @docker.run + await @docker.run command: 'echo test' image: 'alpine' container: 'nikita_test' @@ -95,19 +92,19 @@ describe 'docker.run', -> container: 'nikita_test' rm: false $status.should.be.false() - @docker.rm + await @docker.rm force: true container: 'nikita_test' they 'status not modified', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_test' - @docker.run + await @docker.run command: 'echo test' image: 'alpine' container: 'nikita_test' @@ -118,6 +115,6 @@ describe 'docker.run', -> container: 'nikita_test' rm: false $status.should.be.false() - @docker.rm + await @docker.rm force: true container: 'nikita_test' diff --git a/packages/docker/test/save.coffee b/packages/docker/test/save.coffee index c5ba9e6bd..a3c8cb0fb 100644 --- a/packages/docker/test/save.coffee +++ b/packages/docker/test/save.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.save', -> + return unless test.tags.docker they 'saves a simple image', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker $tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.build + await @docker.build image: 'nikita/load_test' content: "FROM alpine\nCMD ['echo \"hello build from text\"']" {$status} = await @docker.save @@ -25,13 +25,13 @@ describe 'docker.save', -> # For now, there are no mechanism to compare the checksum between an old and a new target nikita $ssh: ssh - docker: docker + docker: test.docker tmpdir: true , ({metadata: {tmpdir}}) -> - @docker.build + await @docker.build image: 'nikita/load_test' content: "FROM alpine\nCMD ['echo \"hello build from text\"']" - @docker.save + await @docker.save debug: true image: 'nikita/load_test:latest' output: "#{tmpdir}/nikita_saved.tar" diff --git a/packages/docker/test/start.coffee b/packages/docker/test/start.coffee index e93c791be..91fa0fcea 100644 --- a/packages/docker/test/start.coffee +++ b/packages/docker/test/start.coffee @@ -1,50 +1,50 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.start', -> + return unless test.tags.docker they 'on stopped container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_start' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_start' - @docker.stop + await @docker.stop container: 'nikita_test_start' {$status} = await @docker.start container: 'nikita_test_start' $status.should.be.true() - @docker.rm + await @docker.rm container: 'nikita_test_start' force: true they 'on started container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_start' force: true - 
@docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_start' - @docker.stop + await @docker.stop container: 'nikita_test_start' - @docker.start + await @docker.start container: 'nikita_test_start' {$status} = await @docker.start container: 'nikita_test_start' $status.should.be.false() - @docker.rm + await @docker.rm container: 'nikita_test_start' force: true diff --git a/packages/docker/test/stop.coffee b/packages/docker/test/stop.coffee index 8f3db4d5e..6f56edcf2 100644 --- a/packages/docker/test/stop.coffee +++ b/packages/docker/test/stop.coffee @@ -1,40 +1,40 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.stop', -> + return unless test.tags.docker they 'on running container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_stop' {$status} = await @docker.stop container: 'nikita_test_stop' $status.should.be.true() - @docker.rm + await @docker.rm container: 'nikita_test_stop' force: true they 'on stopped container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_stop' - @docker.stop + await @docker.stop container: 'nikita_test_stop' {$status} = await @docker.stop container: 'nikita_test_stop' $status.should.be.false() - @docker.rm + await @docker.rm container: 'nikita_test_stop' force: true diff --git a/packages/docker/test/test.coffee b/packages/docker/test/test.coffee index 01108618d..ccc602447 100644 --- a/packages/docker/test/test.coffee +++ b/packages/docker/test/test.coffee @@ -1,41 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config - -# Cache images -return unless config.tags.docker -nikita = require '@nikitajs/core/lib' -they = require('mocha-they')(config.config) -they 'wait for docker daemon to listen', ({ssh}) -> - # Note, this particularly apply to docker compose environnements - # where the daemon take some time to be up and running - # Wait 10s before timeout - # It takes some time under heavy load like testing in parallel - nikita - $ssh: ssh - docker: config.docker - # .execute.wait - # command: 'docker ps' - # retry: 40 - # interval: 250 
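The rewritten test helper above leans on two ESM idioms that are easy to miss in the CoffeeScript: `__dirname` does not exist in ES modules, and existence checks go through `fs/promises`. A minimal JavaScript rendering of those two building blocks:

```js
import fs from "node:fs/promises";

// Promise-based replacement for fs.existsSync.
const exists = async (path) => {
  try {
    await fs.access(path, fs.constants.F_OK);
    return true;
  } catch {
    return false;
  }
};

// Replacement for __dirname: resolving "." against import.meta.url yields the
// directory of the current module, with a trailing slash.
const dirname = new URL(".", import.meta.url).pathname;

// Top-level await is available in ES modules, so the probe can run directly.
console.info(dirname, await exists(`${dirname}package.json`));
```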
- .docker.tools.execute - $interval: 1000 # nor interval nor sleep seems implemented - $retry: 40 - command: 'ps' -they 'cache image to avoid timeout later', ({ssh}) -> - @timeout 0 - nikita - $ssh: ssh - docker: config.docker - .docker.pull image: 'httpd' +export default config.default diff --git a/packages/docker/test/tools/checksum.coffee b/packages/docker/test/tools/checksum.coffee index 8db53076e..42bcc9b34 100644 --- a/packages/docker/test/tools/checksum.coffee +++ b/packages/docker/test/tools/checksum.coffee @@ -1,33 +1,33 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require '../test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.tools.checksum', -> + return unless test.tags.docker they 'checksum on existing repository', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rmi + await @docker.rmi image: 'nikita/checksum' - {image} = await @docker.build + {image_id} = await @docker.build image: 'nikita/checksum' content: "FROM scratch\nCMD ['echo \"hello build from text #{Date.now()}\"']" {checksum} = await @docker.tools.checksum image: 'nikita/checksum' tag: 'latest' - checksum.should.startWith "sha256:#{image}" - @docker.rmi + checksum.should.startWith "sha256:#{image_id}" + await @docker.rmi image: 'nikita/checksum' they 'checksum on not existing repository', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> {checksum} = await @docker.tools.checksum image: 'nikita/invalid_checksum' diff --git a/packages/docker/test/tools/execute.coffee b/packages/docker/test/tools/execute.coffee index 5f758a249..4b1932f94 100644 --- a/packages/docker/test/tools/execute.coffee +++ b/packages/docker/test/tools/execute.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require '../test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.tools.execute', -> + return unless test.tags.docker describe 'schema', -> @@ -39,7 +39,7 @@ describe 'docker.tools.execute', -> it 'with a command', -> ( await nikita - docker: docker + docker: test.docker .docker.tools.execute command: 'version' ) @@ -48,7 +48,7 @@ describe 'docker.tools.execute', -> it 'with a global docker option', -> ( await nikita - docker: docker + docker: test.docker .docker.tools.execute command: '' opts: version: true diff --git a/packages/docker/test/tools/service.coffee b/packages/docker/test/tools/service.coffee index 7176b0692..060ab8686 100644 --- a/packages/docker/test/tools/service.coffee +++ b/packages/docker/test/tools/service.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require '../test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.tools.service', -> + return unless test.tags.docker describe 'schema', -> they 'honors docker.run', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker .docker.tools.service image: 'httpd' container: 'nikita_test_unique' @@ -23,7 +23,7 @@ describe 'docker.tools.service', -> they 'overwrite default', ({ssh}) -> nikita 
$ssh: ssh - docker: docker + docker: test.docker .docker.tools.service image: 'httpd' container: 'nikita_test_unique' @@ -35,19 +35,19 @@ describe 'docker.tools.service', -> they 'simple service', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_test_unique' - @docker.tools.service + await @docker.tools.service image: 'httpd' container: 'nikita_test_unique' port: '499:80' # .wait_connect # port: 499 # host: ipadress of docker, docker-machine... - @docker.rm + await @docker.rm force: true container: 'nikita_test_unique' @@ -76,12 +76,12 @@ describe 'docker.tools.service', -> they 'status not modified', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm force: true container: 'nikita_test' - @docker.tools.service + await @docker.tools.service container: 'nikita_test' image: 'httpd' port: '499:80' @@ -90,6 +90,6 @@ describe 'docker.tools.service', -> image: 'httpd' port: '499:80' $status.should.be.false() - @docker.rm + await @docker.rm force: true container: 'nikita_test' diff --git a/packages/docker/test/tools/status.coffee b/packages/docker/test/tools/status.coffee index 3e0e43f19..b0635c426 100644 --- a/packages/docker/test/tools/status.coffee +++ b/packages/docker/test/tools/status.coffee @@ -1,21 +1,21 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require '../test' -they = require('mocha-they')(config) - -return unless tags.docker +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.tools.status', -> + return unless test.tags.docker they 'on stopped container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_status' force: true - @docker.run + await @docker.run command: "/bin/echo 'test'" image: 'alpine' rm: false @@ -23,25 +23,25 @@ describe 'docker.tools.status', -> {$status} = await @docker.tools.status container: 'nikita_status' $status.should.be.false() - @docker.rm + await @docker.rm container: 'nikita_status' force: true they 'on running container', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_status' force: true - @docker.tools.service + await @docker.tools.service image: 'httpd' port: [ '500:80' ] container: 'nikita_status' {$status} = await @docker.tools.status container: 'nikita_status' $status.should.be.true() - @docker.rm + await @docker.rm container: 'nikita_status' force: true diff --git a/packages/docker/test/volume_create.coffee b/packages/docker/test/volume_create.coffee index 547571b49..442e8c4b9 100644 --- a/packages/docker/test/volume_create.coffee +++ b/packages/docker/test/volume_create.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker or tags.docker_volume +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.volume_create', -> + return unless test.tags.docker or test.tags.docker_volume describe 'schema', -> it 'cast label string to array', -> ( await nikita - docker: docker + docker: test.docker .docker.volume_create label: 'test' , ({config: {label}}) => label @@ -22,7 +22,7 @@ describe 'docker.volume_create', -> it 'cast opt 
string to array', -> ( await nikita - docker: docker + docker: test.docker .docker.volume_create opt: 'test' , ({config: {opt}}) => opt @@ -34,9 +34,9 @@ describe 'docker.volume_create', -> they 'a named volume', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.volume_rm + await @docker.volume_rm name: 'my_volume' {$status} = await @docker.volume_create name: 'my_volume' @@ -44,5 +44,5 @@ describe 'docker.volume_create', -> {$status} = await @docker.volume_create name: 'my_volume' $status.should.be.false() - @docker.volume_rm + await @docker.volume_rm name: 'my_volume' diff --git a/packages/docker/test/volume_rm.coffee b/packages/docker/test/volume_rm.coffee index edc851b1f..ad79ec25e 100644 --- a/packages/docker/test/volume_rm.coffee +++ b/packages/docker/test/volume_rm.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker or tags.docker_volume +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.volume_rm', -> + return unless test.tags.docker or test.tags.docker_volume describe 'schema', -> it 'principal, keyta and password must be provided', -> nikita - docker: docker + docker: test.docker , -> - @docker.volume_rm {} + await @docker.volume_rm {} .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' message: [ @@ -27,11 +27,11 @@ describe 'docker.volume_rm', -> they 'a named volume', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.volume_rm + await @docker.volume_rm name: 'my_volume' - @docker.volume_create + await @docker.volume_create name: 'my_volume' {$status} = await @docker.volume_rm name: 'my_volume' diff --git a/packages/docker/test/wait.coffee b/packages/docker/test/wait.coffee index 7cb1ce2bb..e94055b3d 100644 --- a/packages/docker/test/wait.coffee +++ b/packages/docker/test/wait.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, docker} = require './test' -they = require('mocha-they')(config) - -return unless tags.docker or tags.docker_volume +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'docker.wait', -> + return unless test.tags.docker or test.tags.docker_volume they 'container already started', ({ssh}) -> nikita $ssh: ssh - docker: docker + docker: test.docker , -> - @docker.rm + await @docker.rm container: 'nikita_test_wait' force: true await @docker.tools.service @@ -24,7 +24,7 @@ describe 'docker.wait', -> , 50 {$status} = await nikita $ssh: ssh - docker: docker + docker: test.docker .docker.wait container: 'nikita_test_wait' $status.should.be.true() diff --git a/packages/file/README.md b/packages/file/README.md index 2017d9af8..b5d2ce100 100644 --- a/packages/file/README.md +++ b/packages/file/README.md @@ -2,3 +2,20 @@ # Nikita "file" package The "file" package provides Nikita actions to work with files. 
+ +## Usage + +```js +import "@nikitajs/file/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.file.yaml({ + content: { + preference: { + color: "orange", + }, + }, + target: "~/config.yml" +}); +console.info("File was modified:", $status); +``` diff --git a/packages/file/lib/cache/index.js b/packages/file/lib/cache/index.js index 96ff34a0f..ff5c23fe5 100644 --- a/packages/file/lib/cache/index.js +++ b/packages/file/lib/cache/index.js @@ -1,23 +1,23 @@ // Dependencies -const path = require('path'); -const url = require('url'); -const utils = require('../utils'); -const definitions = require('./schema.json'); - -const protocols_http = ['http:', 'https:']; -module.exports.protocols_http = protocols_http; -const protocols_ftp = ['ftp:', 'ftps:']; -module.exports.protocols_ftp = protocols_ftp; +import path from 'node:path' +import url from "node:url"; +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Errors const errors = { NIKITA_FILE_INVALID_TARGET_HASH: function({config, hash, _hash}) { - return utils.error('NIKITA_FILE_INVALID_TARGET_HASH', [`target ${JSON.stringify(config.target)} got ${hash} instead of ${_hash}`]); + return utils.error('NIKITA_FILE_INVALID_TARGET_HASH', [`target ${JSON.stringify(config.target)} got ${JSON.stringify(hash)} instead of ${JSON.stringify(_hash)}.`]); } }; +const protocols_http = ['http:', 'https:']; +const protocols_ftp = ['ftp:', 'ftps:']; + +export { protocols_http, protocols_ftp }; + // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (config.target == null) { config.target = config.cache_file; diff --git a/packages/file/lib/cson/index.js b/packages/file/lib/cson/index.js index 25d614104..cb2e43ab0 100644 --- a/packages/file/lib/cson/index.js +++ b/packages/file/lib/cson/index.js @@ -1,10 +1,10 @@ // ## Dependencies -const {merge} = require('mixme'); -const cson = require('cson'); -const definitions = require('./schema.json'); +import {merge} from 'mixme'; +import cson from 'cson'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (config.merge) { log({ diff --git a/packages/file/lib/cson/schema.json b/packages/file/lib/cson/schema.json index a7cc25d9b..3209437ab 100644 --- a/packages/file/lib/cson/schema.json +++ b/packages/file/lib/cson/schema.json @@ -3,32 +3,32 @@ "type": "object", "properties": { "backup": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/backup" + "$ref": "module://@nikitajs/file#/definitions/config/properties/backup" }, "content": { "type": "object", "description": "Object to stringify." }, "encoding": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/encoding", + "$ref": "module://@nikitajs/file#/definitions/config/properties/encoding", "default": "utf8" }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "merge": { "type": "boolean", "description": "Read the target if it exists and merge its content." }, "target": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/target", + "$ref": "module://@nikitajs/file#/definitions/config/properties/target", "description": "File path where to write content to or a function that returns a valid\nfile path." 
}, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" } }, "required": [ diff --git a/packages/file/lib/download/README.md b/packages/file/lib/download/README.md index d312fad68..27a5701df 100644 --- a/packages/file/lib/download/README.md +++ b/packages/file/lib/download/README.md @@ -16,8 +16,8 @@ A checksum may provided with the option "sha256", "sha1" or "md5" to validate th file signature. Caching is active if "cache_dir" or "cache_file" are defined to anything but false. -If cache_dir is not a string, default value is './' -If cache_file is not a string, default is source basename. +If cache_dir is not a string, default value is `./`. If cache_file is not a +string, default is source basename. Nikita resolve the path from "cache_dir" to "cache_file", so if cache_file is an absolute path, "cache_dir" will be ignored diff --git a/packages/file/lib/download/index.js b/packages/file/lib/download/index.js index e19963c97..7717fd30b 100644 --- a/packages/file/lib/download/index.js +++ b/packages/file/lib/download/index.js @@ -1,13 +1,12 @@ // Dependencies -const fs = require('fs'); -const url = require('url'); -const utils = require('../utils'); -const definitions = require('./schema.json'); - -const esa = utils.string.escapeshellarg; +import fs from "node:fs"; +import url from "node:url"; +import utils from "@nikitajs/file/utils"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, ssh, tools: { log, path } }) { // Only move the file at the end of action if match is true let match = false; @@ -27,7 +26,7 @@ module.exports = { algo = "sha1"; source_hash = config.sha1; } else if (config.sha256 != null) { - const sha256Type = typeof config.sha256 + const sha256Type = typeof config.sha256; if (sha256Type !== "string" && sha256Type !== "boolean") { throw Error(`Invalid SHA-256 Hash:${config.sha256}`); } @@ -39,10 +38,7 @@ module.exports = { const protocols_http = ["http:", "https:"]; // const protocols_ftp = ["ftp:", "ftps:"]; if (config.force) { - log({ - message: `Using force: ${JSON.stringify(config.force)}`, - level: "DEBUG", - }); + log("DEBUG", `Using force: ${JSON.stringify(config.force)}`); } let source_url = url.parse(config.source); if (config.cache == null && source_url.protocol === null) { @@ -78,10 +74,7 @@ module.exports = { $shy: true, }, async function () { - log({ - message: "Shortcircuit check if provided hash match target", - level: "WARN", - }); + log("WARN", "Shortcircuit check if provided hash match target"); try { const { hash } = await this.fs.hash(config.target, { algo: algo, @@ -102,10 +95,7 @@ module.exports = { if (shortcircuit) { return true; } - log({ - message: "Destination with valid signature, download aborted", - level: "INFO", - }); + log("INFO", "Destination with valid signature, download aborted"); } // Download the file and place it inside local cache // Overwrite the config.source and source_url properties to make them @@ -135,10 +125,7 @@ module.exports = { target: config.target, }); if (utils.stats.isDirectory(stats != null ? 
stats.mode : void 0)) { - log({ - message: "Destination is a directory", - level: "DEBUG", - }); + log("DEBUG", "Destination is a directory"); config.target = path.join(config.target, path.basename(config.source)); } } catch (error) { @@ -150,10 +137,7 @@ module.exports = { Math.random() * 1000 )}`; if (protocols_http.includes(source_url.protocol) === true) { - log({ - message: "HTTP download target url", - level: "DEBUG", - }); + log("DEBUG", "HTTP download target url"); // Ensure target directory exists await this.fs.mkdir({ $shy: true, @@ -188,37 +172,29 @@ module.exports = { const { exists } = await this.fs.base.exists({ target: config.target, }); - const { hash: hash_target } = exists && await this.fs.hash({ - target: config.target, - algo: algo, - }); + const { hash: hash_target } = + exists && + (await this.fs.hash({ + target: config.target, + algo: algo, + })); match = hash_source === hash_target; - log( - match - ? { - message: `Hash matches as ${JSON.stringify(hash_source)}`, - level: "INFO", - module: "nikita/lib/file/download", - } - : { - message: `Hash dont match, source is ${JSON.stringify(hash_source)} and target is ${JSON.stringify(hash_target)}`, - level: "WARN", - module: "nikita/lib/file/download", - } - ); + match + ? log("INFO", `Hash matches as "${hash_source}".`) + : log( + "WARN", + `Hashes don't match, source is "${hash_source}" and target is "${hash_target}".` + ); if (match) { await this.fs.remove({ $shy: true, target: stageDestination, }); } - } else if ( - protocols_http.includes(source_url.protocol) === false && !ssh - ) { - log({ - message: `File download without ssh (cache ${config.cache ? "enabled" : "disabled"})`, - level: "DEBUG", - }); + } else if (protocols_http.includes(source_url.protocol) === false && !ssh) { + log("DEBUG", `File download without ssh (cache ${ + config.cache ? "enabled" : "disabled" + })`); const { hash: hash_source } = await this.fs.hash({ target: config.source, algo: algo, @@ -226,24 +202,19 @@ module.exports = { const { exists } = await this.fs.base.exists({ target: config.target, }); - const { hash: hash_target } = exists && await this.fs.hash({ - target: config.target, - algo: algo, - }); + const { hash: hash_target } = + exists && + (await this.fs.hash({ + target: config.target, + algo: algo, + })); match = hash_source === hash_target; - log( - match - ? { - message: `Hash matches as ${JSON.stringify(hash_source)}`, - level: "INFO", - module: "nikita/lib/file/download", - } - : { - message: `Hash dont match, source is ${JSON.stringify(hash_source)} and target is ${JSON.stringify(hash_target)}`, - level: "WARN", - module: "nikita/lib/file/download", - } - ); + match + ? log("INFO", `Hash matches as "${hash_source}".`) + : log( + "WARN", + `Hashes don't match, source is "${hash_source}" and target is "${hash_target}".` + ); if (!match) { await this.fs.mkdir({ $shy: true, @@ -254,13 +225,10 @@ module.exports = { target: stageDestination, }); } - } else if ( - protocols_http.includes(source_url.protocol) === false && ssh - ) { - log({ - message: `File download with ssh (cache ${config.cache ? "enabled" : "disabled"})`, - level: "DEBUG", - }); + } else if (protocols_http.includes(source_url.protocol) === false && ssh) { + log("DEBUG", `File download with ssh (cache ${ config.cache ? 
"enabled" : "disabled" + })`); const { hash: hash_source } = await this.fs.hash({ $ssh: false, $sudo: false, @@ -270,24 +238,19 @@ module.exports = { const { exists } = await this.fs.base.exists({ target: config.target, }); - const { hash: hash_target } = exists && await this.fs.hash({ - target: config.target, - algo: algo, - }); + const { hash: hash_target } = + exists && + (await this.fs.hash({ + target: config.target, + algo: algo, + })); match = hash_source === hash_target; - log( - match - ? { - message: `Hash matches as ${JSON.stringify(hash_source)}`, - level: "INFO", - module: "nikita/lib/file/download", - } - : { - message: `Hash dont match, source is ${JSON.stringify(hash_source)} and target is ${JSON.stringify(hash_target)}.`, - level: "WARN", - module: "nikita/lib/file/download", - } - ); + match + ? log("INFO", `Hash matches as "${hash_source}".`) + : log( + "WARN", + `Hashes don't match, source is "${hash_source}" and target is "${hash_target}".` + ); if (!match) { await this.fs.mkdir({ $shy: true, @@ -300,27 +263,24 @@ module.exports = { return fs.createReadStream(config.source).pipe(ws); }, }); - log({ - message: `Downloaded local source ${JSON.stringify( + log( + "INFO", + `Downloaded local source ${JSON.stringify( config.source - )} to remote target ${JSON.stringify(stageDestination)}`, - level: "INFO", - }); + )} to remote target ${JSON.stringify(stageDestination)}.` + ); } catch (error) { - log({ - message: `Downloaded local source ${JSON.stringify( + log( + "ERROR", + `Download of local source ${JSON.stringify( config.source - )} to remote target ${JSON.stringify(stageDestination)} failed`, - level: "ERROR", - }); + )} to remote target ${JSON.stringify(stageDestination)} failed.` + ); throw error; } } } - log({ - message: "Unstage downloaded file", - level: "DEBUG", - }); + log("DEBUG", "Unstage downloaded file"); if (!match) { await this.fs.move({ source: stageDestination, @@ -342,31 +302,18 @@ module.exports = { } }, hooks: { - on_action: async function({ - config, - tools: {find} - }) { - config.cache = await find(function({ - config: {cache} - }) { - return cache; - }); - config.cache_file = await find(function({ - config: {cache_file} - }) { - return cache_file; - }); - config.cache_dir = await find(function({ - config: {cache_dir} - }) { - return cache_dir; - }); - if (/^file:\/\//.test(config.source)) { - return config.source = config.source.substr(7); - } - } + on_action: async function ({ config, tools: { find } }) { + config.cache = await find(({ config: { cache } }) => cache); + config.cache_file = await find( + ({ config: { cache_file } }) => cache_file + ); + config.cache_dir = await find(({ config: { cache_dir } }) => cache_dir); + if (/^file:\/\//.test(config.source)) { + return (config.source = config.source.substr(7)); + } + }, }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/file/lib/download/schema.json b/packages/file/lib/download/schema.json index a4f7ea99a..8d1d37b5d 100644 --- a/packages/file/lib/download/schema.json +++ b/packages/file/lib/download/schema.json @@ -33,7 +33,7 @@ "description": "Force cache overwrite if it exists" }, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "http_headers": { "type": "array", @@ -55,7 +55,7 @@ "description": "Validate uploaded file with md5 checksum (only for binary upload for\nnow), may be the string checksum or 
will be deduced from source if\n\"true\"." }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode" }, "proxy": { "type": "string", @@ -93,7 +93,7 @@ "description": "File path where to write content to. Pass the content." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/file/lib/index.js b/packages/file/lib/index.js index b6e601f6b..92416094b 100644 --- a/packages/file/lib/index.js +++ b/packages/file/lib/index.js @@ -1,21 +1,17 @@ // Dependencies -const path = require('path'); -const utils = require('./utils'); -const definitions = require('./schema.json'); +import path from "node:path"; +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // Content: pass all arguments to function calls const context = arguments[0]; - log({ - message: `Source is \"${config.source}\"`, - level: "DEBUG", - }); - log({ - message: `Destination is \"${config.target}\"`, - level: "DEBUG", - }); + if(config.source){ + log("DEBUG", `Source is ${JSON.stringify(config.source)}.`); + } + log("DEBUG", `Write to destination ${JSON.stringify(config.target)}.`); if (typeof config.content === "function") { config.content = config.content.call(this, context); } @@ -35,7 +31,8 @@ module.exports = { case "unicode": config.eof = "\u2028"; } - let targetContent = targetContentHash = null; + let targetContent = null; + let targetContentHash = null; if (config.write == null) { config.write = []; } @@ -332,20 +329,27 @@ module.exports = { } }, hooks: { - on_action: function({config}) { - if (!((config.source || config.content != null) || config.replace != null || config.write != null)) { + on_action: function ({ config }) { + if ( + !( + config.source || + config.content != null || + config.replace != null || + config.write != null + ) + ) { // Validate parameters // TODO: try to express this in JSON schema - throw Error('Missing source or content or replace or write'); + throw Error("Missing source or content or replace or write"); } - if (config.source && (config.content != null)) { - throw Error('Define either source or content'); + if (config.source && config.content != null) { + throw Error("Define either source or content"); } if (config.content) { - if (typeof config.content === 'number') { - return config.content = `${config.content}`; + if (typeof config.content === "number") { + return (config.content = `${config.content}`); } else if (Buffer.isBuffer(config.content)) { - return config.content = config.content.toString(); + return (config.content = config.content.toString()); } } }, diff --git a/packages/file/lib/ini/README.md b/packages/file/lib/ini/README.md index bc3156ff7..9d16b064d 100644 --- a/packages/file/lib/ini/README.md +++ b/packages/file/lib/ini/README.md @@ -17,7 +17,7 @@ provided in the `content` option. 
Available values for the `stringify` option are: * `stringify` - Default, implemented by `require('nikita/file/lib/utils/ini').stringify` + Default, implemented by `nikita/file/utils/ini#stringify` The default stringify function accepts: diff --git a/packages/file/lib/ini/index.js b/packages/file/lib/ini/index.js index a759e06e3..36462b100 100644 --- a/packages/file/lib/ini/index.js +++ b/packages/file/lib/ini/index.js @@ -1,10 +1,10 @@ // Dependencies -const utils = require('../utils'); -const {merge} = require('mixme'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/file/utils"; +import {merge} from 'mixme'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { let content; let org_props = {}; diff --git a/packages/file/lib/ini/read/index.js b/packages/file/lib/ini/read/index.js index 49eb06a45..aea9f9495 100644 --- a/packages/file/lib/ini/read/index.js +++ b/packages/file/lib/ini/read/index.js @@ -1,10 +1,10 @@ // Dependencies -const {merge} = require('mixme'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import {merge} from 'mixme'; +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const parse = config.parse || utils.ini.parse; const {data} = (await this.fs.base.readFile({ diff --git a/packages/file/lib/ini/read/schema.json b/packages/file/lib/ini/read/schema.json index 61fab4e6a..8e1c245e3 100644 --- a/packages/file/lib/ini/read/schema.json +++ b/packages/file/lib/ini/read/schema.json @@ -9,7 +9,7 @@ }, "parse": { "typeof": "function", - "description": "User-defined function to parse the content from ini format, default to\n`require('ini').parse`, see\n'nikita.file.utils.ini.parse_multi_brackets'. " + "description": "User-defined function to parse the content from ini format, default to\n`@nikitajs/file/utils/ini#parse`. " }, "target": { "type": "string", diff --git a/packages/file/lib/ini/schema.json b/packages/file/lib/ini/schema.json index f1c6e412f..e8f1c1404 100644 --- a/packages/file/lib/ini/schema.json +++ b/packages/file/lib/ini/schema.json @@ -28,7 +28,7 @@ "description": "Characters for line delimiter, usage depends on the stringify option,\nwith the default stringify option, default to unix style if executed\nremotely (SSH) or to the platform if executed locally (\"\r\n for\nwindows\", \"\n\" otherwise). The name stands for End Of Line." }, "encoding": { - "$ref": "module://@nikitajs/file/lib/ini/read#/definitions/config/properties/encoding", + "$ref": "module://@nikitajs/file/ini/read#/definitions/config/properties/encoding", "default": "utf8" }, "escape": { @@ -37,7 +37,7 @@ "description": "Escape the section's header title replace '.' by '.'; \"true\" by\ndefault." }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "local": { "type": "boolean", @@ -48,17 +48,17 @@ "description": "Read the target if it exists and merge its content." 
}, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "parse": { - "$ref": "module://@nikitajs/file/lib/ini/read#/definitions/config/properties/parse" + "$ref": "module://@nikitajs/file/ini/read#/definitions/config/properties/parse" }, "stringify": { "typeof": "function", - "description": "User-defined function to stringify the content to ini format, default\nto `require('ini').stringify`, see\n'nikita.file.utils.ini.stringify_brackets_then_curly' for an\nexample." + "description": "User-defined function to stringify the content to ini format, default\nto `@nikitajs/file/utils/ini#stringify`." }, "source": { - "$ref": "module://@nikitajs/file/lib/ini/read#/definitions/config/properties/target", + "$ref": "module://@nikitajs/file/ini/read#/definitions/config/properties/target", "description": "Path to a ini file providing default options; lower precedence than\nthe content object; may be used conjointly with the local option;\noptional, use should_exists to enforce its presence." }, "target": { @@ -66,7 +66,7 @@ "description": "File path where to write content to or a callback." }, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/file/lib/json/index.js b/packages/file/lib/json/index.js index 13bac6a23..95263748a 100644 --- a/packages/file/lib/json/index.js +++ b/packages/file/lib/json/index.js @@ -1,9 +1,9 @@ // Dependencies -const {merge} = require('mixme'); -const definitions = require('./schema.json'); +import {merge} from 'mixme'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.merge) { try { diff --git a/packages/file/lib/properties/index.js b/packages/file/lib/properties/index.js index 79b4fc4b0..cbb60ef44 100644 --- a/packages/file/lib/properties/index.js +++ b/packages/file/lib/properties/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require("../utils"); +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // Trim let fnl_props = config.trim diff --git a/packages/file/lib/properties/read/index.js b/packages/file/lib/properties/read/index.js index 7de05f8a6..c4f0d9eb4 100644 --- a/packages/file/lib/properties/read/index.js +++ b/packages/file/lib/properties/read/index.js @@ -1,9 +1,9 @@ // Dependencies -const quote = require('regexp-quote'); -const definitions = require('./schema.json'); +import quote from "regexp-quote"; +import definitions from "./schema.json" assert { type: "json" }; // Actions -module.exports = { +export default { handler: async function({config}) { const {data} = (await this.fs.base.readFile({ target: config.target, diff --git a/packages/file/lib/properties/read/schema.json b/packages/file/lib/properties/read/schema.json index c22dedc64..6de18c85d 100644 --- a/packages/file/lib/properties/read/schema.json +++ b/packages/file/lib/properties/read/schema.json @@ -8,7 +8,7 @@ "description": "Preserve comments, key is the comment while value is \"null\"." 
}, "encoding": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/encoding", + "$ref": "module://@nikitajs/file#/definitions/config/properties/encoding", "default": "utf8" }, "separator": { diff --git a/packages/file/lib/properties/schema.json b/packages/file/lib/properties/schema.json index f61316353..abe840b19 100644 --- a/packages/file/lib/properties/schema.json +++ b/packages/file/lib/properties/schema.json @@ -3,10 +3,10 @@ "type": "object", "properties": { "backup": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/backup" + "$ref": "module://@nikitajs/file#/definitions/config/properties/backup" }, "comment": { - "$ref": "module://@nikitajs/file/lib/properties/read#/definitions/config/properties/comment" + "$ref": "module://@nikitajs/file/properties/read#/definitions/config/properties/comment" }, "content": { "type": "object", @@ -19,11 +19,11 @@ "description": "Merges content properties with target file." }, "local": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/local" + "$ref": "module://@nikitajs/file#/definitions/config/properties/local" }, "separator": { "default": "=", - "$ref": "module://@nikitajs/file/lib/properties/read#/definitions/config/properties/separator" + "$ref": "module://@nikitajs/file/properties/read#/definitions/config/properties/separator" }, "sort": { "type": "boolean", @@ -31,10 +31,10 @@ "description": "Sort the properties before writting them." }, "target": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/target" + "$ref": "module://@nikitajs/file#/definitions/config/properties/target" }, "trim": { - "$ref": "module://@nikitajs/file/lib/properties/read#/definitions/config/properties/trim" + "$ref": "module://@nikitajs/file/properties/read#/definitions/config/properties/trim" } }, "required": [ diff --git a/packages/file/lib/register.js b/packages/file/lib/register.js index 8e1e8d783..d61211142 100644 --- a/packages/file/lib/register.js +++ b/packages/file/lib/register.js @@ -1,50 +1,43 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { file: { - '': '@nikitajs/file/lib', - cache: '@nikitajs/file/lib/cache', - cson: '@nikitajs/file/lib/cson', - download: '@nikitajs/file/lib/download', + '': '@nikitajs/file', + cache: '@nikitajs/file/cache', + cson: '@nikitajs/file/cson', + download: '@nikitajs/file/download', ini: { - '': '@nikitajs/file/lib/ini', - 'read': '@nikitajs/file/lib/ini/read' + '': '@nikitajs/file/ini', + 'read': '@nikitajs/file/ini/read' }, - json: '@nikitajs/file/lib/json', + json: '@nikitajs/file/json', properties: { - '': '@nikitajs/file/lib/properties', - read: '@nikitajs/file/lib/properties/read' + '': '@nikitajs/file/properties', + read: '@nikitajs/file/properties/read' }, - render: '@nikitajs/file/lib/render', - touch: '@nikitajs/file/lib/touch', + render: '@nikitajs/file/render', + touch: '@nikitajs/file/touch', types: { 'systemd': { - 'resolved': '@nikitajs/file/lib/types/systemd/resolved', - 'timesyncd': '@nikitajs/file/lib/types/systemd/timesyncd' + 'resolved': '@nikitajs/file/types/systemd/resolved', + 'timesyncd': '@nikitajs/file/types/systemd/timesyncd' }, - 'ceph_conf': '@nikitajs/file/lib/types/ceph_conf', - 'hfile': '@nikitajs/file/lib/types/hfile', - 'krb5_conf': '@nikitajs/file/lib/types/krb5_conf', - 'locale_gen': '@nikitajs/file/lib/types/locale_gen', - 'my_cnf': 
'@nikitajs/file/lib/types/my_cnf', - 'pacman_conf': '@nikitajs/file/lib/types/pacman_conf', - 'ssh_authorized_keys': '@nikitajs/file/lib/types/ssh_authorized_keys', - 'wireguard_conf': '@nikitajs/file/lib/types/wireguard_conf', - 'yum_repo': '@nikitajs/file/lib/types/yum_repo' + 'ceph_conf': '@nikitajs/file/types/ceph_conf', + 'hfile': '@nikitajs/file/types/hfile', + 'krb5_conf': '@nikitajs/file/types/krb5_conf', + 'locale_gen': '@nikitajs/file/types/locale_gen', + 'my_cnf': '@nikitajs/file/types/my_cnf', + 'pacman_conf': '@nikitajs/file/types/pacman_conf', + 'ssh_authorized_keys': '@nikitajs/file/types/ssh_authorized_keys', + 'wireguard_conf': '@nikitajs/file/types/wireguard_conf', + 'yum_repo': '@nikitajs/file/types/yum_repo' }, - upload: '@nikitajs/file/lib/upload', - yaml: '@nikitajs/file/lib/yaml' + upload: '@nikitajs/file/upload', + yaml: '@nikitajs/file/yaml' } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/file/lib/render/index.js b/packages/file/lib/render/index.js index 07c1b05d2..5786d4f65 100644 --- a/packages/file/lib/render/index.js +++ b/packages/file/lib/render/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const handlebars = require('handlebars'); -const definitions = require('./schema.json'); +import path from 'node:path' +import handlebars from 'handlebars'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // Read source if (config.source) { diff --git a/packages/file/lib/render/schema.json b/packages/file/lib/render/schema.json index cc2bffbde..f0e3c3e90 100644 --- a/packages/file/lib/render/schema.json +++ b/packages/file/lib/render/schema.json @@ -3,34 +3,34 @@ "type": "object", "properties": { "content": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/content" + "$ref": "module://@nikitajs/file#/definitions/config/properties/content" }, "context": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/context" + "$ref": "module://@nikitajs/file#/definitions/config/properties/context" }, "engine": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/engine" + "$ref": "module://@nikitajs/file#/definitions/config/properties/engine" }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "local": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/local" + "$ref": "module://@nikitajs/file#/definitions/config/properties/local" }, "remove_empty_lines": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/remove_empty_lines" + "$ref": "module://@nikitajs/file#/definitions/config/properties/remove_empty_lines" }, "source": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/source" + "$ref": "module://@nikitajs/file#/definitions/config/properties/source" }, "target": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/target" + "$ref": "module://@nikitajs/file#/definitions/config/properties/target" }, 
"uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/file/lib/schema.json b/packages/file/lib/schema.json index 23c0178d3..d7001ad52 100644 --- a/packages/file/lib/schema.json +++ b/packages/file/lib/schema.json @@ -25,7 +25,7 @@ "description": "Create a backup, append a provided string to the filename extension or\na timestamp if value is not a string, only apply if the target file\nexists and is modified." }, "backup_mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode", + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode", "description": "Backup file mode (permission and sticky bits), defaults to `0o0400`,\nin the form of `{mode: 0o0400}` or `{mode: \"0400\"}`." }, "content": { @@ -76,7 +76,7 @@ "description": "Name of the marker from where the content will be replaced." }, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "local": { "type": "boolean", @@ -95,7 +95,7 @@ "description": "Replace this marker, default to the replaced string if missing." }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode" }, "place_before": { "oneOf": [ @@ -152,7 +152,7 @@ "description": "Name of the marker until where the content will be replaced." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" }, "unlink": { "type": "boolean", diff --git a/packages/file/lib/touch/index.js b/packages/file/lib/touch/index.js index 0b23d8053..5967d48ae 100644 --- a/packages/file/lib/touch/index.js +++ b/packages/file/lib/touch/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { const { $status } = await this.call(async function () { log({ diff --git a/packages/file/lib/touch/schema.json b/packages/file/lib/touch/schema.json index a3d2a0e7e..7bc14a575 100644 --- a/packages/file/lib/touch/schema.json +++ b/packages/file/lib/touch/schema.json @@ -3,10 +3,10 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "target": { "oneOf": [ @@ -20,7 +20,7 @@ "description": "File path where to write file or a function that returns a valid file\npath." 
}, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/file/lib/types/ceph_conf/index.js b/packages/file/lib/types/ceph_conf/index.js index f0d261ecc..15170ae50 100644 --- a/packages/file/lib/types/ceph_conf/index.js +++ b/packages/file/lib/types/ceph_conf/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.rootdir) { config.target = `${path.join(config.rootdir, config.target)}`; diff --git a/packages/file/lib/types/hfile/index.js b/packages/file/lib/types/hfile/index.js index 760381d7b..47f4a0c77 100644 --- a/packages/file/lib/types/hfile/index.js +++ b/packages/file/lib/types/hfile/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); -const utils = require('../../utils'); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/file/utils"; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { let org_props = {}; let fnl_props = {}; diff --git a/packages/file/lib/types/hfile/schema.json b/packages/file/lib/types/hfile/schema.json index 35d72598b..cc46fe8cb 100644 --- a/packages/file/lib/types/hfile/schema.json +++ b/packages/file/lib/types/hfile/schema.json @@ -34,32 +34,32 @@ "description": "User defined function used to transform properties." 
}, "backup": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/backup" + "$ref": "module://@nikitajs/file#/definitions/config/properties/backup" }, "backup_mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/backup_mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/backup_mode" }, "eof": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/eof" + "$ref": "module://@nikitajs/file#/definitions/config/properties/eof" }, "encoding": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/encoding", + "$ref": "module://@nikitajs/file#/definitions/config/properties/encoding", "default": "utf8" }, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "local": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/local" + "$ref": "module://@nikitajs/file#/definitions/config/properties/local" }, "unlink": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/unlink" + "$ref": "module://@nikitajs/file#/definitions/config/properties/unlink" } } } diff --git a/packages/file/lib/types/krb5_conf/index.js b/packages/file/lib/types/krb5_conf/index.js index bbd672f07..18eec6c2f 100644 --- a/packages/file/lib/types/krb5_conf/index.js +++ b/packages/file/lib/types/krb5_conf/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return await this.file.ini({ parse: utils.ini.parse_brackets_then_curly, diff --git a/packages/file/lib/types/locale_gen/index.js b/packages/file/lib/types/locale_gen/index.js index 37b6e3409..705c1004a 100644 --- a/packages/file/lib/types/locale_gen/index.js +++ b/packages/file/lib/types/locale_gen/index.js @@ -1,9 +1,9 @@ // Dependencies -const path = require('path'); -const definitions = require('./schema.json'); +import path from 'node:path' +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.rootdir) { config.target = `${path.join(config.rootdir, config.target)}`; diff --git a/packages/file/lib/types/my_cnf/index.js b/packages/file/lib/types/my_cnf/index.js index 06489bf35..23c36ac0d 100644 --- a/packages/file/lib/types/my_cnf/index.js +++ b/packages/file/lib/types/my_cnf/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return await this.file.ini({ stringify: utils.ini.stringify_single_key diff --git a/packages/file/lib/types/pacman_conf/index.js b/packages/file/lib/types/pacman_conf/index.js index 54e84c066..257cdff51 100644 --- 
a/packages/file/lib/types/pacman_conf/index.js +++ b/packages/file/lib/types/pacman_conf/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.rootdir) { config.target = `${path.join(config.rootdir, config.target)}`; diff --git a/packages/file/lib/types/ssh_authorized_keys/index.js b/packages/file/lib/types/ssh_authorized_keys/index.js index c6173c097..6c4d67a1d 100644 --- a/packages/file/lib/types/ssh_authorized_keys/index.js +++ b/packages/file/lib/types/ssh_authorized_keys/index.js @@ -1,10 +1,10 @@ // ## Dependencies -const path = require('path'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.fs.assert({ target: path.dirname(config.target) diff --git a/packages/file/lib/types/ssh_authorized_keys/schema.json b/packages/file/lib/types/ssh_authorized_keys/schema.json index 14983eec9..bd61f62ef 100644 --- a/packages/file/lib/types/ssh_authorized_keys/schema.json +++ b/packages/file/lib/types/ssh_authorized_keys/schema.json @@ -15,7 +15,7 @@ "description": "Read the target if it exists and merge its content." }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "target": { "type": "string", diff --git a/packages/file/lib/types/systemd/resolved/index.js b/packages/file/lib/types/systemd/resolved/index.js index 675118bfc..86c10865c 100644 --- a/packages/file/lib/types/systemd/resolved/index.js +++ b/packages/file/lib/types/systemd/resolved/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import path from 'node:path' +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { if (config.rootdir) { // Configs diff --git a/packages/file/lib/types/systemd/timesyncd/index.js b/packages/file/lib/types/systemd/timesyncd/index.js index 997d43251..32ec0cce4 100644 --- a/packages/file/lib/types/systemd/timesyncd/index.js +++ b/packages/file/lib/types/systemd/timesyncd/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import path from 'node:path' +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.rootdir) { config.target = `${path.join(config.rootdir, config.target)}`; diff --git a/packages/file/lib/types/wireguard_conf/index.js b/packages/file/lib/types/wireguard_conf/index.js index efc3cdef0..93ad0a066 100644 --- a/packages/file/lib/types/wireguard_conf/index.js +++ b/packages/file/lib/types/wireguard_conf/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const utils = 
require('../../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.target == null) { config.target = `/etc/wireguard/${config.interface}.conf`; diff --git a/packages/file/lib/types/yum_repo/index.js b/packages/file/lib/types/yum_repo/index.js index 4027a784a..5e4f77a96 100644 --- a/packages/file/lib/types/yum_repo/index.js +++ b/packages/file/lib/types/yum_repo/index.js @@ -1,26 +1,22 @@ // Dependencies -const path = require('path'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import path from "node:path"; +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({config}) { +export default { + handler: async function ({ config }) { // Set the target directory to yum's default path if target is a file name config.target = path.resolve("/etc/yum.repos.d", config.target); - await this.file.ini( - { - parse: utils.ini.parse_multi_brackets, - }, - config, - { - // Dont escape the section's header, headers are only one level and - // contains versions with dots. - escape: false, - } - ); + await this.file.ini({ + parse: utils.ini.parse_multi_brackets, + ...config, + // Dont escape the section's header, headers are only one level and + // contains versions with dots. + escape: false, + }); }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/file/lib/types/yum_repo/schema.json b/packages/file/lib/types/yum_repo/schema.json index a849046cb..6ee4b04f0 100644 --- a/packages/file/lib/types/yum_repo/schema.json +++ b/packages/file/lib/types/yum_repo/schema.json @@ -1,7 +1,7 @@ { "config": { "type": "object", - "$ref": "module://@nikitajs/file/lib/ini/index#/definitions/config", + "$ref": "module://@nikitajs/file/ini#/definitions/config", "properties": { "target": { "type": "string", diff --git a/packages/file/lib/upload/index.js b/packages/file/lib/upload/index.js index e67c11e52..599247786 100644 --- a/packages/file/lib/upload/index.js +++ b/packages/file/lib/upload/index.js @@ -1,11 +1,11 @@ // Dependencies -const fs = require('fs'); -const path = require('path'); -const utils = require('../utils'); -const definitions = require('./schema.json'); +import fs from 'node:fs' +import path from 'node:path' +import utils from "@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { const algo = config.sha1 != null ? "sha1" : "md5"; log({ @@ -89,12 +89,10 @@ module.exports = { ? { message: `Hash matches as '${hash_source}'`, level: "INFO", - module: "nikita/lib/file/download", } : { message: `Hash dont match, source is '${hash_source}' and target is '${hash_target}'`, level: "WARN", - module: "nikita/lib/file/download", } ); return !match; diff --git a/packages/file/lib/upload/schema.json b/packages/file/lib/upload/schema.json index 15875551b..2be5b4a15 100644 --- a/packages/file/lib/upload/schema.json +++ b/packages/file/lib/upload/schema.json @@ -25,7 +25,7 @@ "description": "Name of the marker from where the content will be replaced." 
}, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "md5": { "type": [ @@ -36,7 +36,7 @@ "description": "Validate uploaded file with md5 checksum (only for binary upload for\nnow), may be the string checksum or will be deduced from source if\n\"true\"." }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode" }, "sha1": { "default": false, @@ -62,7 +62,7 @@ "description": "File path where to write content to. Pass the content." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/file/lib/utils/diff.js b/packages/file/lib/utils/diff.js index 823487cfa..d0b31649b 100644 --- a/packages/file/lib/utils/diff.js +++ b/packages/file/lib/utils/diff.js @@ -1,27 +1,26 @@ /* - # Diff Report the difference between 2 strings. - */ // ## Dependencies -const pad = require('pad'); -const diff = require('diff'); -const string = require('@nikitajs/core/lib/utils/string'); +import pad from "pad"; +import { diffLines } from "diff"; +import string from '@nikitajs/core/utils/string'; // Utils -module.exports = function(oldStr, newStr) { +export default function(oldStr, newStr) { if (oldStr == null) { oldStr = ''; } if (newStr == null) { newStr = ''; } - const lines = diff.diffLines(oldStr, newStr); + const lines = diffLines(oldStr, newStr); let text = []; - let count_added = count_removed = 0; + let count_added = 0; + let count_removed = 0; const padsize = Math.ceil(lines.length / 10); for (const line of lines) { if (line.value === null) { diff --git a/packages/file/lib/utils/hfile.js b/packages/file/lib/utils/hfile.js index 8e18c7285..ebb4aeb8f 100644 --- a/packages/file/lib/utils/hfile.js +++ b/packages/file/lib/utils/hfile.js @@ -1,8 +1,8 @@ -const xmldom = require('xmldom'); -const builder = require('xmlbuilder'); +import xmldom from 'xmldom'; +import builder from 'xmlbuilder'; -module.exports = { +export default { /* `parse(xml, [property])` @@ -61,8 +61,7 @@ module.exports = { ``` */ stringify: function(properties) { - var i, j, k, ks, len, len1, markup, name, property, value; - markup = builder.create('configuration', { + const markup = builder.create('configuration', { version: '1.0', encoding: 'UTF-8' }); @@ -70,18 +69,15 @@ module.exports = { properties.sort(function(el1, el2) { return el1.name > el2.name; }); - for (i = 0, len = properties.length; i < len; i++) { - ({name, value} = properties[i]); - property = markup.ele('property'); + for (const {name, value} of properties) { + const property = markup.ele('property'); property.ele('name', name); property.ele('value', value); } } else { - ks = Object.keys(properties); - ks.sort(); - for (j = 0, len1 = ks.length; j < len1; j++) { - k = ks[j]; - property = markup.ele('property'); + const ks = Object.keys(properties).sort(); + for (const k of ks) { + const property = markup.ele('property'); property.ele('name', k); property.ele('value', properties[k]); } diff --git a/packages/file/lib/utils/index.js b/packages/file/lib/utils/index.js index 57c8795a4..e9f5ba43f 100644 --- a/packages/file/lib/utils/index.js +++ b/packages/file/lib/utils/index.js @@ -1,11 +1,13 @@ -const utils = 
require('@nikitajs/core/lib/utils'); -const diff = require('./diff'); -const hfile = require('./hfile'); -const ini = require('./ini'); -const partial = require('./partial'); +import utils from "@nikitajs/core/utils"; +import diff from '@nikitajs/file/utils/diff'; +import hfile from '@nikitajs/file/utils/hfile'; +import ini from '@nikitajs/file/utils/ini'; +import partial from '@nikitajs/file/utils/partial'; -module.exports = { +export { diff, hfile, ini, partial }; + +export default { ...utils, diff: diff, hfile: hfile, diff --git a/packages/file/lib/utils/ini.js b/packages/file/lib/utils/ini.js index 6497f25da..c8e03acee 100644 --- a/packages/file/lib/utils/ini.js +++ b/packages/file/lib/utils/ini.js @@ -1,392 +1,463 @@ - // Dependencies -const ini = require('ini'); -const utils = require('@nikitajs/core/lib/utils'); +import ini from "ini"; +import utils from "@nikitajs/core/utils"; -// Utils -module.exports = { - // Remove undefined and null values - safe: function(val) { - if (typeof val !== "string" || val.match(/[\r\n]/) || val.match(/^\[/) || (val.length > 1 && val.charAt(0) === "\"" && val.slice(-1) === "\"") || val !== val.trim()) { - return JSON.stringify(val); - } else { - return val.replace(/;/g, '\\;'); +// Remove undefined and null values +const safe = function (val) { + if ( + typeof val !== "string" || + val.match(/[\r\n]/) || + val.match(/^\[/) || + (val.length > 1 && val.charAt(0) === '"' && val.slice(-1) === '"') || + val !== val.trim() + ) { + return JSON.stringify(val); + } else { + return val.replace(/;/g, "\\;"); + } +}; + +const split_by_dots = function (str) { + return str + .replace(/\\1/g, "\\2LITERAL\\1LITERAL\\2") + .replace(/\\\./g, "\\1") + .split(/\./) + .map(function (part) { + return part + .replace(/\\1/g, ".") + .replace(/\\2LITERAL\.LITERAL\\2/g, "\\1"); + }); +}; + +const parse = function (content) { + return ini.parse(content); +}; + +const parse_brackets_then_curly = function (str, options = {}) { + const data = {}; + let current = data; + let stack = [current]; + const comment = options.comment || ";"; + utils.string.lines(str).forEach(function (line) { + if (!line || line.match(/^\s*$/)) { + return; } - }, - split_by_dots: function (str) { - return str - .replace(/\\1/g, "\\2LITERAL\\1LITERAL\\2") - .replace(/\\\./g, "\\1") - .split(/\./) - .map(function (part) { - return part - .replace(/\\1/g, ".") - .replace(/\\2LITERAL\.LITERAL\\2/g, "\\1"); - }); - }, - parse: function(content) { - return ini.parse(content); - }, - parse_brackets_then_curly: function(str, options = {}) { - const data = {}; - let current = data; - let stack = [current]; - const comment = options.comment || ';'; - utils.string.lines(str).forEach(function(line) { - if (!line || line.match(/^\s*$/)) { - return; + // Category level 1 + let match; + if ((match = line.match(/^\s*\[(.+?)\]\s*$/))) { + const key = match[1]; + current = data[key] = {}; + return (stack = [current]); + } else if ((match = line.match(/^\s*(.+?)\s*=\s*\{\s*$/))) { + // Add a child + const parent = stack[stack.length - 1]; + parent[match[1]] = current = {}; + return stack.push(current); + } else if ((match = line.match(/^\s*\}\s*$/))) { + if (stack.length === 0) { + throw Error('Invalid Syntax: found extra "}"'); } - // Category level 1 - let match; - if ((match = line.match(/^\s*\[(.+?)\]\s*$/))) { - const key = match[1]; - current = data[key] = {}; - return (stack = [current]); - } else if ((match = line.match(/^\s*(.+?)\s*=\s*\{\s*$/))) { - // Add a child - const parent = stack[stack.length - 1]; - 
parent[match[1]] = current = {}; - return stack.push(current); - } else if ((match = line.match(/^\s*\}\s*$/))) { - if (stack.length === 0) { - throw Error('Invalid Syntax: found extra "}"'); - } - stack.pop(); - return (current = stack[stack.length - 1]); - // comment - } else if ( - comment && - (match = line.match(RegExp(`^\\s*(${comment}.*)$`))) - ) { - return (current[match[1]] = null); - // key value - } else if ((match = line.match(/^\s*(.+?)\s*=\s*(.+)\s*$/))) { - let textmatch; - if ((textmatch = match[2].match(/^"(.*)"$/))) { - match[2] = textmatch[1].replace('\\"', '"'); - } - return (current[match[1]] = match[2]); - // else - } else if ((match = line.match(/^\s*(.+?)\s*$/))) { - return (current[match[1]] = null); + stack.pop(); + return (current = stack[stack.length - 1]); + // comment + } else if ( + comment && + (match = line.match(RegExp(`^\\s*(${comment}.*)$`))) + ) { + return (current[match[1]] = null); + // key value + } else if ((match = line.match(/^\s*(.+?)\s*=\s*(.+)\s*$/))) { + let textmatch; + if ((textmatch = match[2].match(/^"(.*)"$/))) { + match[2] = textmatch[1].replace('\\"', '"'); } - }); - return data; - }, - /* + return (current[match[1]] = match[2]); + // else + } else if ((match = line.match(/^\s*(.+?)\s*$/))) { + return (current[match[1]] = null); + } + }); + return data; +}; + +/* + +Each category is surrounded by one or several square brackets. The number of brackets indicates +the depth of the category. - Each category is surrounded by one or several square brackets. The number of brackets indicates - the depth of the category. - - Options are: +Options are: - - `comment` Default to ";" +- `comment` Default to ";" - */ - parse_multi_brackets: function(str, options = {}) { - const data = {}; - let current = data; - const stack = [current]; - const comment = options.comment || ';'; - utils.string.lines(str).forEach(function(line) { - let match; - if (!line || line.match(/^\s*$/)) { - return; +*/ +const parse_multi_brackets = function (str, options = {}) { + const data = {}; + let current = data; + const stack = [current]; + const comment = options.comment || ";"; + utils.string.lines(str).forEach(function (line) { + let match; + if (!line || line.match(/^\s*$/)) { + return; + } + // Category + if ((match = line.match(/^\s*(\[+)(.+?)(\]+)\s*$/))) { + const depth = match[1].length; + // Add a child + if (depth === stack.length) { + const parent = stack[depth - 1]; + parent[match[2]] = current = {}; + stack.push(current); + } + // Invalid child hierarchy + if (depth > stack.length) { + throw Error(`Invalid child ${match[2]}`); + } + // Move up or at the same level + if (depth < stack.length) { + stack.splice(depth, stack.length - depth); + const parent = stack[depth - 1]; + parent[match[2]] = current = {}; + return stack.push(current); } - // Category - if (match = line.match(/^\s*(\[+)(.+?)(\]+)\s*$/)) { - const depth = match[1].length; - // Add a child - if (depth === stack.length) { - const parent = stack[depth - 1]; - parent[match[2]] = current = {}; - stack.push(current); - } - // Invalid child hierarchy - if (depth > stack.length) { - throw Error(`Invalid child ${match[2]}`); - } - // Move up or at the same level - if (depth < stack.length) { - stack.splice(depth, stack.length - depth); - const parent = stack[depth - 1]; - parent[match[2]] = current = {}; - return stack.push(current); - } // comment - } else if (comment && (match = line.match(RegExp(`^\\s*(${comment}.*)$`)))) { - return current[match[1]] = null; + } else if ( + comment && + (match = 
line.match(RegExp(`^\\s*(${comment}.*)$`))) + ) { + return (current[match[1]] = null); // key value - } else if (match = line.match(/^\s*(.+?)\s*=\s*(.+)\s*$/)) { - return current[match[1]] = match[2]; + } else if ((match = line.match(/^\s*(.+?)\s*=\s*(.+)\s*$/))) { + return (current[match[1]] = match[2]); // else - } else if (match = line.match(/^\s*(.+?)\s*$/)) { - return current[match[1]] = null; - } - }); - return data; - }, - /* - Same as the parse_multi_brackets instead it takes in count values which are defined on several lines - As an example the ambari-agent .ini configuration file + } else if ((match = line.match(/^\s*(.+?)\s*$/))) { + return (current[match[1]] = null); + } + }); + return data; +}; + +/* +Same as the parse_multi_brackets instead it takes in count values which are defined on several lines +As an example the ambari-agent .ini configuration file - * `comment` Default to ";" +* `comment` Default to ";" - */ - parse_multi_brackets_multi_lines: function(str, options = {}) { - const data = {}; - let current = data; - const stack = [current]; - const comment = options.comment || ';'; - let writing = false; - let previous = {}; - utils.string.lines(str).forEach(function(line, _, __) { - if (!line || line.match(/^\s*$/)) { - return; +*/ +const parse_multi_brackets_multi_lines = function (str, options = {}) { + const data = {}; + let current = data; + const stack = [current]; + const comment = options.comment || ";"; + let writing = false; + let previous = {}; + utils.string.lines(str).forEach(function (line, _, __) { + if (!line || line.match(/^\s*$/)) { + return; + } + let match, parent; + // Category + if ((match = line.match(/^\s*(\[+)(.+?)(\]+)\s*$/))) { + const depth = match[1].length; + // Add a child + if (depth === stack.length) { + parent = stack[depth - 1]; + parent[match[2]] = current = {}; + stack.push(current); + } + // Invalid child hierarchy + if (depth > stack.length) { + throw Error(`Invalid child ${match[2]}`); + } + // Move up or at the same level + if (depth < stack.length) { + stack.splice(depth, stack.length - depth); + parent = stack[depth - 1]; + parent[match[2]] = current = {}; + return stack.push(current); } - let match, parent; - // Category - if (match = line.match(/^\s*(\[+)(.+?)(\]+)\s*$/)) { - const depth = match[1].length; - // Add a child - if (depth === stack.length) { - parent = stack[depth - 1]; - parent[match[2]] = current = {}; - stack.push(current); - } - // Invalid child hierarchy - if (depth > stack.length) { - throw Error(`Invalid child ${match[2]}`); - } - // Move up or at the same level - if (depth < stack.length) { - stack.splice(depth, stack.length - depth); - parent = stack[depth - 1]; - parent[match[2]] = current = {}; - return stack.push(current); - } // comment - } else if (comment && (match = line.match(RegExp(`^\\s*(${comment}.*)$`)))) { - writing = false; - return current[match[1]] = null; + } else if ( + comment && + (match = line.match(RegExp(`^\\s*(${comment}.*)$`))) + ) { + writing = false; + return (current[match[1]] = null); // key value - } else if (match = line.match(/^\s*(.+?)\s*=\s*(.+)\s*$/)) { - writing = false; - current[match[1]] = match[2]; - previous = match[1]; - return writing = true; + } else if ((match = line.match(/^\s*(.+?)\s*=\s*(.+)\s*$/))) { + writing = false; + current[match[1]] = match[2]; + previous = match[1]; + return (writing = true); // else - } else if (match = line.match(/^\s*(.+?)\s*$/)) { - if (writing) { - return current[previous] += match[1]; - } else { - return current[match[1]] = 
null; - } + } else if ((match = line.match(/^\s*(.+?)\s*$/))) { + if (writing) { + return (current[previous] += match[1]); + } else { + return (current[match[1]] = null); } - }); - return data; - }, - // same as ini parse but transform value which are true and type of true as '' - // to be user by stringify_single_key - stringify: function(obj, section, options = {}) { - if (arguments.length === 2) { - options = section; - section = undefined; } - if (options.separator == null) { - options.separator = ' = '; - } - if (options.eol == null) { - options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; - } - if (options.escape == null) { - options.escape = true; - } - const safe = module.exports.safe; - const split_by_dots = module.exports.split_by_dots; - const children = []; - let out = ""; - Object.keys(obj).forEach(function(k) { - const val = obj[k]; - if (Array.isArray(val)) { - return val.forEach(function(item) { - return out += safe(`${k}[]`) + options.separator + safe(item) + options.eol; - }); - } else if (val && typeof val === "object") { - return children.push(k); - } else if (typeof val === 'boolean') { - if (val === true) { - return out += safe(k) + options.eol; - } else { + }); + return data; +}; - } +// same as ini parse but transform value which are true and type of true as '' +// to be user by stringify_single_key +const stringify = function (obj, section, options = {}) { + if (arguments.length === 2) { + options = section; + section = undefined; + } + if (options.separator == null) { + options.separator = " = "; + } + if (options.eol == null) { + options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; + } + if (options.escape == null) { + options.escape = true; + } + const children = []; + let out = ""; + Object.keys(obj).forEach(function (k) { + const val = obj[k]; + if (Array.isArray(val)) { + return val.forEach(function (item) { + return (out += + safe(`${k}[]`) + options.separator + safe(item) + options.eol); + }); + } else if (val && typeof val === "object") { + return children.push(k); + } else if (typeof val === "boolean") { + if (val === true) { + return (out += safe(k) + options.eol); } else { - // disregard false value - return out += safe(k) + options.separator + safe(val) + options.eol; } - }); - if (section && out.length) { - out = "[" + safe(section) + "]" + options.eol + out; + } else { + // disregard false value + return (out += safe(k) + options.separator + safe(val) + options.eol); } - children.forEach(function(k) { - // escape the section name dot as some daemon could not parse it - const nk = options.escape ? split_by_dots(k).join('\\.') : k; - const child = module.exports.stringify(obj[k], (section ? section + "." : "") + nk, options); - if (out.length && child.length) { - out += options.eol; - } - return out += child; - }); - return out; - }, - // works like stringify but write only the key when the value is '' - // be careful when using ini.parse is parses single key line as key = true - stringify_single_key: function(obj, section, options = {}) { - if (arguments.length === 2) { - options = section; - section = undefined; + }); + if (section && out.length) { + out = "[" + safe(section) + "]" + options.eol + out; + } + children.forEach(function (k) { + // escape the section name dot as some daemon could not parse it + const nk = options.escape ? split_by_dots(k).join("\\.") : k; + const child = stringify( + obj[k], + (section ? section + "." 
: "") + nk, + options + ); + if (out.length && child.length) { + out += options.eol; } - if (options.separator == null) { - options.separator = ' = '; + return (out += child); + }); + return out; +}; + +// works like stringify but write only the key when the value is '' +// be careful when using ini.parse is parses single key line as key = true +const stringify_single_key = function (obj, section, options = {}) { + if (arguments.length === 2) { + options = section; + section = undefined; + } + if (options.separator == null) { + options.separator = " = "; + } + if (options.eol == null) { + options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; + } + const children = []; + let out = ""; + Object.keys(obj).forEach(function (k) { + const val = obj[k]; + if (val && Array.isArray(val)) { + return val.forEach(function (item) { + return (out += + val === "" || val === true + ? `${k}` + "\n" + : safe(`${k}[]`) + options.separator + safe(item) + "\n"); + }); + } else if (val && typeof val === "object") { + return children.push(k); + } else { + return (out += + val === "" || val === true + ? `${k}` + options.eol + : safe(k) + options.separator + safe(val) + options.eol); } - if (options.eol == null) { - options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; + }); + if (section && out.length) { + out = "[" + safe(section) + "]" + options.eol + out; + } + children.forEach(function (k) { + const nk = split_by_dots(k).join("\\."); + const child = stringify_single_key( + obj[k], + (section ? section + "." : "") + nk, + options + ); + if (out.length && child.length) { + out += options.eol; } - const safe = module.exports.safe; - const split_by_dots = module.exports.split_by_dots; - const children = []; - let out = ""; - Object.keys(obj).forEach(function(k) { - const val = obj[k]; - if (val && Array.isArray(val)) { - return val.forEach(function(item) { - return out += val === '' || val === true ? `${k}` + "\n" : safe(`${k}[]`) + options.separator + safe(item) + "\n"; - }); - } else if (val && typeof val === "object") { - return children.push(k); + return (out += child); + }); + return out; +}; + +const stringify_brackets_then_curly = function ( + content, + depth = 0, + options = {} +) { + if (arguments.length === 2) { + options = depth; + depth = 0; + } + if (options.separator == null) { + options.separator = " = "; + } + if (options.eol == null) { + options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; + } + let out = ""; + const indent = " "; + const prefix = indent.repeat(depth); + for (const k in content) { + const v = content[k]; + // isUndefined = typeof v is 'undefined' + const isBoolean = typeof v === "boolean"; + const isNull = v === null; + const isArray = Array.isArray(v); + const isObj = typeof v === "object" && !isNull && !isArray; + if (isObj) { + if (depth === 0) { + out += `${prefix}[${k}]${options.eol}`; + out += stringify_brackets_then_curly( + v, + depth + 1, + options + ); + out += `${options.eol}`; } else { - return out += val === '' || val === true ? `${k}` + options.eol : safe(k) + options.separator + safe(val) + options.eol; - } - }); - if (section && out.length) { - out = "[" + safe(section) + "]" + options.eol + out; - } - children.forEach(function(k) { - const nk = split_by_dots(k).join('\\.'); - const child = module.exports.stringify_single_key(obj[k], (section ? section + "." 
: "") + nk, options); - if (out.length && child.length) { - out += options.eol; + out += `${prefix}${k}${options.separator}{${options.eol}`; + out += stringify_brackets_then_curly( + v, + depth + 1, + options + ); + out += `${prefix}}${options.eol}`; } - return out += child; - }); - return out; - }, - stringify_brackets_then_curly: function(content, depth = 0, options = {}) { - if (arguments.length === 2) { - options = depth; - depth = 0; - } - if (options.separator == null) { - options.separator = ' = '; - } - if (options.eol == null) { - options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; - } - let out = ''; - const indent = ' '; - const prefix = indent.repeat(depth); - for (const k in content) { - const v = content[k]; - // isUndefined = typeof v is 'undefined' - const isBoolean = typeof v === 'boolean'; - const isNull = v === null; - const isArray = Array.isArray(v); - const isObj = typeof v === 'object' && !isNull && !isArray; - if (isObj) { - if (depth === 0) { - out += `${prefix}[${k}]${options.eol}`; - out += module.exports.stringify_brackets_then_curly(v, depth + 1, options); - out += `${options.eol}`; - } else { - out += `${prefix}${k}${options.separator}{${options.eol}`; - out += module.exports.stringify_brackets_then_curly(v, depth + 1, options); - out += `${prefix}}${options.eol}`; - } + } else { + if (isArray) { + out += v + .map((v) => `${prefix}${k}${options.separator}${v}`) + .join(`${options.eol}`); + } else if (isNull) { + out += `${prefix}${k}${options.separator}null`; + } else if (isBoolean) { + out += `${prefix}${k}${options.separator}${v ? "true" : "false"}`; } else { - if (isArray) { - out += v - .map((v) => `${prefix}${k}${options.separator}${v}`) - .join(`${options.eol}`); - } else if (isNull) { - out += `${prefix}${k}${options.separator}null`; - } else if (isBoolean) { - out += `${prefix}${k}${options.separator}${v ? 'true' : 'false'}`; - } else { - out += `${prefix}${k}${options.separator}${v}`; - } - out += `${options.eol}`; + out += `${prefix}${k}${options.separator}${v}`; } + out += `${options.eol}`; } - return out; - }, - /* - Each category is surrounded by one or several square brackets. The number of brackets indicates - the depth of the category. - Taking now indent option into consideration: some file are indent aware ambari-agent .ini file - */ - stringify_multi_brackets: function(content, depth = 0, options = {}) { - if (arguments.length === 2) { - options = depth; - depth = 0; - } - if (options.separator == null) { - options.separator = ' = '; - } - if (options.eol == null) { - options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; + } + return out; +}; + +/* +Each category is surrounded by one or several square brackets. The number of brackets indicates +the depth of the category. +Taking now indent option into consideration: some file are indent aware ambari-agent .ini file +*/ +const stringify_multi_brackets = function (content, depth = 0, options = {}) { + if (arguments.length === 2) { + options = depth; + depth = 0; + } + if (options.separator == null) { + options.separator = " = "; + } + if (options.eol == null) { + options.eol = !options.ssh && process.platform === "win32" ? "\r\n" : "\n"; + } + let out = ""; + const indent = options.indent != null ? 
options.indent : " "; + const prefix = indent.repeat(depth); + for (const k in content) { + const v = content[k]; + const isBoolean = typeof v === "boolean"; + const isNull = v === null; + const isArray = Array.isArray(v); + const isObj = typeof v === "object" && !isArray && !isNull; + if (isObj) { + continue; } - let out = ''; - const indent = options.indent != null ? options.indent : ' '; - const prefix = indent.repeat(depth); - for (const k in content) { - const v = content[k]; - const isBoolean = typeof v === 'boolean'; - const isNull = v === null; - const isArray = Array.isArray(v); - const isObj = typeof v === 'object' && !isArray && !isNull; - if (isObj) { - continue; - } - if (isNull) { - out += `${prefix}${k}`; - } else if (isBoolean) { - out += `${prefix}${k}${options.separator}${v ? 'true' : 'false'}`; - } else if (isArray) { - out += v.filter(function(vv) { + if (isNull) { + out += `${prefix}${k}`; + } else if (isBoolean) { + out += `${prefix}${k}${options.separator}${v ? "true" : "false"}`; + } else if (isArray) { + out += v + .filter(function (vv) { return vv != null; - }).map(function(vv) { - if (typeof vv !== 'string') { - throw Error(`Stringify Invalid Value: expect a string for key ${k}, got ${vv}`); + }) + .map(function (vv) { + if (typeof vv !== "string") { + throw Error( + `Stringify Invalid Value: expect a string for key ${k}, got ${vv}` + ); } return `${prefix}${k}${options.separator}${vv}`; - }).join(options.eol); - } else { - out += `${prefix}${k}${options.separator}${v}`; - } - out += `${options.eol}`; + }) + .join(options.eol); + } else { + out += `${prefix}${k}${options.separator}${v}`; } - for (const k in content) { - const v = content[k]; - const isNull = v === null; - const isArray = Array.isArray(v); - const isObj = typeof v === 'object' && !isArray && !isNull; - if (!isObj) { - continue; - } - // out += "#{prefix}#{utils.string.repeat '[', depth+1}#{k}#{utils.string.repeat ']', depth+1}#{options.eol}" - out += `${prefix}${'['.repeat(depth + 1)}${k}${']'.repeat(depth + 1)}${options.eol}`; - out += module.exports.stringify_multi_brackets(v, depth + 1, options); + out += `${options.eol}`; + } + for (const k in content) { + const v = content[k]; + const isNull = v === null; + const isArray = Array.isArray(v); + const isObj = typeof v === "object" && !isArray && !isNull; + if (!isObj) { + continue; } - return out; + // out += "#{prefix}#{utils.string.repeat '[', depth+1}#{k}#{utils.string.repeat ']', depth+1}#{options.eol}" + out += `${prefix}${"[".repeat(depth + 1)}${k}${"]".repeat(depth + 1)}${ + options.eol + }`; + out += stringify_multi_brackets(v, depth + 1, options); } + return out; +}; + +export { + safe, + split_by_dots, + parse, + parse_brackets_then_curly, + parse_multi_brackets, + parse_multi_brackets_multi_lines, + stringify, + stringify_single_key, + stringify_brackets_then_curly, +}; + +export default { + safe: safe, + split_by_dots: split_by_dots, + parse: parse, + parse_brackets_then_curly: parse_brackets_then_curly, + parse_multi_brackets: parse_multi_brackets, + parse_multi_brackets_multi_lines: parse_multi_brackets_multi_lines, + stringify: stringify, + stringify_single_key: stringify_single_key, + stringify_brackets_then_curly: stringify_brackets_then_curly, + stringify_multi_brackets: stringify_multi_brackets, }; diff --git a/packages/file/lib/utils/partial.js b/packages/file/lib/utils/partial.js index 08bb56826..7e78475e2 100644 --- a/packages/file/lib/utils/partial.js +++ b/packages/file/lib/utils/partial.js @@ -7,10 +7,10 @@ Replace 
partial elements in a text. */ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); +import utils from "@nikitajs/core/utils"; // Utils -module.exports = function(config, log) { +export default function(config, log) { if(!config.write?.length > 0) return; log({ message: "Replacing sections of the file", @@ -51,6 +51,7 @@ module.exports = function(config, log) { }); let posoffset = 0; const orgContent = config.content; + let res; while ((res = opts.place_before.exec(orgContent)) !== null) { log({ message: "Before regexp found a match", @@ -87,6 +88,7 @@ module.exports = function(config, log) { }); let posoffset = 0; const orgContent = config.content; + let res; while ((res = opts.append.exec(orgContent)) !== null) { log({ message: "Append regexp found a match", diff --git a/packages/file/lib/yaml/index.js b/packages/file/lib/yaml/index.js index 3ed4bf7e4..eba9bd0b1 100644 --- a/packages/file/lib/yaml/index.js +++ b/packages/file/lib/yaml/index.js @@ -1,11 +1,11 @@ // Dependencies -const utils = require("../utils"); -const yaml = require("js-yaml"); -const { merge } = require("mixme"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/file/utils"; +import yaml from "js-yaml"; +import { merge } from "mixme"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { try { if (config.merge) { diff --git a/packages/file/lib/yaml/schema.json b/packages/file/lib/yaml/schema.json index 87f8fb5df..8ea340b08 100644 --- a/packages/file/lib/yaml/schema.json +++ b/packages/file/lib/yaml/schema.json @@ -29,7 +29,7 @@ "description": "Replace from after this marker, a string or a regular expression." }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "indent": { "type": "integer", @@ -63,7 +63,7 @@ "description": "Read the target if it exists and merge its content." }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "replace": { "type": "array", @@ -109,7 +109,7 @@ "description": "Replace to before this marker, a string or a regular expression." 
}, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/file/package.json b/packages/file/package.json index b31f87e83..9122e877c 100644 --- a/packages/file/package.json +++ b/packages/file/package.json @@ -77,6 +77,13 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + ".": "./lib/index.js", + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], @@ -87,17 +94,17 @@ "license": "MIT", "main": "./lib/index", "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/file/lib/register", - "@nikitajs/log/lib/register" - ], "inline-diffs": true, - "timeout": 10000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/file/register", + "@nikitajs/log/register", + "should" + ], + "throw-deprecation": true, + "timeout": 10000 }, "repository": { "type": "git", @@ -107,5 +114,6 @@ "scripts": { "test": "npm run test:local", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/file/test.sample.coffee b/packages/file/test.sample.coffee index b92e886a5..9d45e57a7 100644 --- a/packages/file/test.sample.coffee +++ b/packages/file/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: api: true posix: true @@ -13,5 +13,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/file/test/cache.file.coffee b/packages/file/test/cache.file.coffee index 27963234b..4aff74fdc 100644 --- a/packages/file/test/cache.file.coffee +++ b/packages/file/test/cache.file.coffee @@ -1,13 +1,13 @@ -http = require 'http' -path = require 'path' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import http from 'node:http' +import path from 'node:path' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.cache file', -> + return unless test.tags.posix they 'current cache file match provided hash', ({ssh}) -> nikita @@ -62,23 +62,24 @@ describe 'file.cache file', -> source: "#{tmpdir}/my_file" cache_dir: "#{tmpdir}/cache" md5: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/cache/my_file\" got df8fede7ff71608e24a5576326e41c75 instead of xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/cache/my_file\" got \"df8fede7ff71608e24a5576326e41c75\" instead of \"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"." 
they 'into local cache_dir', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @file.touch "#{tmpdir}/a_file" {$status} = await @file.cache - source: "#{__filename}" + source: "#{tmpdir}/a_file" cache_dir: "#{tmpdir}/my_cache_dir" $status.should.be.true() {$status} = await @file.cache - source: "#{__filename}" + source: "#{tmpdir}/a_file" cache_dir: "#{tmpdir}/my_cache_dir" $status.should.be.false() @fs.assert - target: "#{tmpdir}/my_cache_dir/#{path.basename __filename}" + target: "#{tmpdir}/my_cache_dir/a_file" describe 'md5', -> @@ -119,4 +120,4 @@ describe 'file.cache file', -> source: "#{tmpdir}/source" cache_file: "#{tmpdir}/target" md5: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/target\" got df8fede7ff71608e24a5576326e41c75 instead of xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/target\" got \"df8fede7ff71608e24a5576326e41c75\" instead of \"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"." diff --git a/packages/file/test/cache.http.coffee b/packages/file/test/cache.http.coffee index e88c348e7..76854344c 100644 --- a/packages/file/test/cache.http.coffee +++ b/packages/file/test/cache.http.coffee @@ -1,13 +1,13 @@ -http = require 'http' -path = require 'path' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import http from 'node:http' +import path from 'node:path' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.cache http', -> + return unless test.tags.posix portincr = 22345 server = -> @@ -141,7 +141,7 @@ describe 'file.cache http', -> source: "http://localhost:#{srv.port}/missing" cache_dir: "#{tmpdir}/cache" md5: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/cache/missing\" got 9e076f5885f5cc16a4b5aeb8de4adff5 instead of xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/cache/missing\" got \"9e076f5885f5cc16a4b5aeb8de4adff5\" instead of \"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"." finally srv.close() @@ -184,6 +184,6 @@ describe 'file.cache http', -> source: "http://localhost:#{srv.port}/my_file" cache_file: "#{tmpdir}/target" md5: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/target\" got df8fede7ff71608e24a5576326e41c75 instead of xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + .should.be.rejectedWith message: "NIKITA_FILE_INVALID_TARGET_HASH: target \"#{tmpdir}/target\" got \"df8fede7ff71608e24a5576326e41c75\" instead of \"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"." 
finally srv.close() diff --git a/packages/file/test/cson.coffee b/packages/file/test/cson.coffee index f98a3e3dc..6909cead1 100644 --- a/packages/file/test/cson.coffee +++ b/packages/file/test/cson.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.cson', -> + return unless test.tags.posix they 'stringify content to target', ({ssh}) -> nikita diff --git a/packages/file/test/download.zip b/packages/file/test/download.zip deleted file mode 100644 index 430cf0ac812f076f7ec5451687b53943c0d6bba5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1506 zcmV<81s(cOO9KQH0{{RB02q(mMpU+g&|(Du00Aig01p5F0Az1>ZftL1WG-WGW@cq& z?OIE7(=ZUe=U42+2bY=PJZK4I1_p-r0S-LQ9ob&R9vmrHGNr@t-&x6zI(D!FG$93Y zXyWxsTJ86JYf0`P(s~UKAjMBvoFcg9+8wN!j_Ou(?G9ELFd=L9q34ZN8Vm;-m9G1H zV`Fg#I>O7kU8!hx+Zf#eSFIadxr0H7DoA6WY@xgmiDaQWg4+)V0|2_gbYKEAy?f>5 zBXh56W@k_}p+=Z~#+2HKEJ<>ck6fnsj8WSYfnW43;|#FjZF=#76q&|%x$T649Z|%p z<__lCyGUb=FOh{Xn@mRV)qNs`Mxk%Nzg(lc09Vr><8{Jf;eOj_i6R77eq@*K#`srWR`@UkDmNk#z+}R;N-lOX8^9el=$)ZykMtj5W|dvg`|~3j`Xosw*Hwje&+B zVq%Fuk^l&utk5P2*xgk>UqlKKv%hP&yVZdRp`FP+YP3R5??&htkaEu#72h`jZ4IS~cS7AWr@) zUSkmR7(=KPB|p8)!~d*mKEAeNDWpEn5g2acB4>#@*fXzAz20!Im37qZ4$RA) zA&6FT2Y&rIdYx->IqU`lfO0~uCUQQX9DpzXz^3EK78O6K}{GEQ!Oa8*#rhQ5z@Iz65F^E-F%2KVpy)9D$%KRxA~7qjRG8BE5#6)MG+c*Rj- z=&MEsN5Ew!LKuep9>);poX@9sC--o6%FfQ_ckj&ZQZ&K*Y)Hvh8VzMR4ErJ-wpFmn zi-1PeFg2VIcwu=D5mThAG{rzy@%`sU!HqtA`nl0x-1r`qP$ixt^#yw~I_Q7eCIc}>Q{q>e2^JKal)Ax9p#C|Y-`|R=4*UvnA;hMHT zyI*j2+pRc1c8lY=3(xv`XKFp#`-9zg1WwPA|HYdRZr!baac^C_ zGTE8UT{GIB)txfincae??Fw9%2Ti)vM^%>F`XH}LAngj>tg6PH)Ua7xk8+#bEviGD zuoOGyuir*se)%l;xVLR{Ln7vq#p0RScw8cj+-;>5s<4cCA9*)2JXqd&&+E8~`kZBn z1}(2%G|Ei>ga)pI{N3a{j6(JY#a|+Un{`~&A%XrkK!4`g^jA;icUj#pIz)I3?|`aB z^X^CS&R6rVg`Urpoj24{aII(a&FCsBeFsoW0Rle*6axSN2mlz5-9}WlgV16H0003g z000jF0000000000Ab_NS00000WN&wFY;R#?E@N+IW@TkiO9ci1000010096!0001w I1poj503LA7t^fc4 diff --git a/packages/file/test/download_file.coffee b/packages/file/test/download_file.coffee index a9150d28d..0af4b5bae 100644 --- a/packages/file/test/download_file.coffee +++ b/packages/file/test/download_file.coffee @@ -1,12 +1,12 @@ -path = require 'path' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import path from 'node:path' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.download file', -> + return unless test.tags.posix describe 'source', -> @@ -15,15 +15,18 @@ describe 'file.download file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download - source: "file://#{__filename}" + await @file + content: 'Where is my precious?' 
+ target: "#{tmpdir}/a_file" + await @file.download + source: "file://#{tmpdir}/a_file" target: "#{tmpdir}/download_test" # Download a non existing file .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/download_test" - content: /yeah/ - @file.download - source: "file://#{__filename}" + content: /precious/ + await @file.download + source: "file://#{tmpdir}/a_file" target: "#{tmpdir}/download_test" # Download on an existing file .should.be.finally.containEql $status: false @@ -33,15 +36,18 @@ describe 'file.download file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download - source: "#{__filename}" + await @file + content: 'Where is my precious?' + target: "#{tmpdir}/a_file" + await @file.download + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/download_test" - content: /yeah/ - @file.download # Download on an existing file - source: "#{__filename}" + content: /precious/ + await @file.download # Download on an existing file + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" .should.be.finally.containEql $status: false @@ -50,54 +56,44 @@ describe 'file.download file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download - source: "#{__dirname}/doesnotexists" + await @file.download + source: "#{tmpdir}/doesnotexists" target: "#{tmpdir}/download_test" - .should.be.rejectedWith message: "NIKITA_FS_STAT_TARGET_ENOENT: failed to stat the target, no file exists for target, got \"#{__dirname}/doesnotexists\"" + .should.be.rejectedWith message: "NIKITA_FS_STAT_TARGET_ENOENT: failed to stat the target, no file exists for target, got \"#{tmpdir}/doesnotexists\"" they 'into an existing directory', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @fs.mkdir + await @file.touch + target: "#{tmpdir}/a_file" + await @fs.mkdir target: "#{tmpdir}/download_test" - @file.download - source: "#{__filename}" + await @file.download + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" - @fs.assert - target: "#{tmpdir}/download_test/#{path.basename __filename}" + await @fs.assert + target: "#{tmpdir}/download_test/a_file" describe 'cache', -> - they 'validate md5', ({ssh}) -> - source = "#{__dirname}/download.zip" - nikita - $tmpdir: true - , ({metadata: {tmpdir}}) -> - @file.download - $ssh: ssh - source: source - target: "#{tmpdir}/download_test" - cache: true - cache_dir: "#{tmpdir}/cache_dir" - md5: '3f104676a5f72de08b811dbb725244ff' - .should.be.finally.containEql $status: true - @fs.assert "#{tmpdir}/cache_dir/#{path.basename source}" - they 'cache dir', ({ssh}) -> # Download a non existing file nikita $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download + await @file + content: 'Where is my precious?' + target: "#{tmpdir}/a_file" + await @file.download $ssh: ssh - source: "#{__filename}" + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" cache: true cache_dir: "#{tmpdir}/cache_dir" .should.be.finally.containEql $status: true - @fs.assert "#{tmpdir}/cache_dir/#{path.basename __filename}" + await @fs.assert "#{tmpdir}/cache_dir/a_file" they 'detect file already present', ({ssh}) -> ssh = null @@ -105,22 +101,25 @@ describe 'file.download file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download - source: "#{__filename}" + await @file + content: 'Where is my precious?' 
+ target: "#{tmpdir}/a_file" + await @file.download + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" cache: true cache_dir: "#{tmpdir}/cache_dir" - @file.download - source: "#{__filename}" + await @file.download + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" cache: true cache_dir: "#{tmpdir}/cache_dir" .should.be.finally.containEql $status: false - @file + await @file content: 'abc' target: "#{tmpdir}/download_test" - @file.download - source: "#{__filename}" + await @file.download + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" cache: true cache_dir: "#{tmpdir}/cache_dir" @@ -128,28 +127,49 @@ describe 'file.download file', -> describe 'md5', -> - they 'cache dir with md5 string', ({ssh}) -> + they 'cache dir with valid md5', ({ssh}) -> # Download a non existing file nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" - content: 'okay' - @file.download + content: 'Where is my precious?' + await @file.download source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" cache: true cache_dir: "#{tmpdir}/cache_dir" - md5: 'df8fede7ff71608e24a5576326e41c75' + md5: '2317728a5e7fbd40c1acbe01378f0230' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/cache_dir/a_file" - content: 'okay' - @fs.assert + content: /precious/ + await @fs.assert + target: "#{tmpdir}/download_test" + content: /precious/ + + they 'cache dir with invalid md5', ({ssh}) -> + # Download a non existing file + nikita + $ssh: ssh + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @file + target: "#{tmpdir}/a_file" + content: 'Where is my precious?' + await @file.download + source: "#{tmpdir}/a_file" target: "#{tmpdir}/download_test" - content: 'okay' + cache: true + cache_dir: "#{tmpdir}/cache_dir" + md5: 'XXXXX' + .should.be.rejectedWith [ + 'NIKITA_FILE_INVALID_TARGET_HASH:' + "target \"#{tmpdir}/cache_dir/a_file\"" + 'got "2317728a5e7fbd40c1acbe01378f0230" instead of "XXXXX".' 
+ ].join ' ' they 'is computed if true', ({ssh}) -> # Download with invalid checksum @@ -157,18 +177,18 @@ describe 'file.download file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @log.fs + await @log.fs basedir: tmpdir serializer: text: (log) -> "[#{log.level}] #{log.message}\n" - @file + await @file target: "#{tmpdir}/source" content: "okay" - @file.download + await @file.download source: "#{tmpdir}/source" target: "#{tmpdir}/check_md5" md5: true .should.be.finally.containEql $status: true - @file.download + await @file.download source: "#{tmpdir}/source" target: "#{tmpdir}/check_md5" md5: true @@ -189,9 +209,9 @@ describe 'file.download file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.touch + await @file.touch target: "#{tmpdir}/a_file" - @file.download + await @file.download source: "#{tmpdir}/a_file" target: "a_dir/download_test" .should.be.rejectedWith message: 'Non Absolute Path: target is "a_dir/download_test", SSH requires absolute paths, you must provide an absolute path in the target or the cwd option' diff --git a/packages/file/test/download_url.coffee b/packages/file/test/download_url.coffee index e7caceb3d..9e947e766 100644 --- a/packages/file/test/download_url.coffee +++ b/packages/file/test/download_url.coffee @@ -1,12 +1,12 @@ -http = require 'http' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import http from 'node:http' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.download url', -> + return unless test.tags.posix server = null @@ -38,14 +38,14 @@ describe 'file.download url', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download + await @file.download source: 'http://localhost:12345' target: "#{tmpdir}/download" .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/download" content: /okay/ - @file.download # Download on an existing file + await @file.download # Download on an existing file source: 'http://localhost:12345' target: "#{tmpdir}/download" .should.be.finally.containEql $status: false @@ -56,12 +56,12 @@ describe 'file.download url', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download + await @file.download source: 'http://localhost:12345' target: "#{tmpdir}/download_test" mode: 0o0770 .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/download_test" mode: 0o0770 @@ -90,16 +90,16 @@ describe 'file.download url', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download + await @file.download source: 'http://localhost:12345' target: "#{tmpdir}/target" cache: true cache_file: "#{tmpdir}/cache_file" .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/cache_file" content: /okay/ - @fs.assert + await @fs.assert target: "#{tmpdir}/target" content: /okay/ @@ -128,14 +128,14 @@ describe 'file.download url', -> nikita $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download + await @file.download $ssh: ssh source: 'http://localhost:12345' target: "#{tmpdir}/download" cache: true cache_dir: "#{tmpdir}/cache_dir" .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert $ssh: null target: "#{tmpdir}/cache_dir/localhost:12345" @@ -146,13 +146,13 @@ describe 'file.download url', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - 
@log.fs + await @log.fs basedir: tmpdir serializer: text: (log) -> "[#{log.level}] #{log.message}\n" - @file + await @file content: 'okay' target: "#{tmpdir}/target" - @file.download + await @file.download source: 'http://localhost:12345' target: "#{tmpdir}/target" md5: 'df8fede7ff71608e24a5576326e41c75' @@ -167,15 +167,15 @@ describe 'file.download url', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file content: "not okay" target: "#{tmpdir}/target" - @file.download + await @file.download source: 'http://localhost:12345' target: "#{tmpdir}/target" md5: 'df8fede7ff71608e24a5576326e41c75' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/target" content: /okay/ @@ -185,7 +185,7 @@ describe 'file.download url', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.download + await @file.download md5: '2f74dbbee4142b7366c93b115f914fff' source: 'http://localhost:12345' target: "#{tmpdir}/target" @@ -197,7 +197,7 @@ describe 'file.download url', -> $tmpdir: true $ssh: ssh , ({metadata: {tmpdir}}) -> - @file.download + await @file.download md5: 'df8fede7ff71608e24a5576326e41c75' source: 'http://localhost:12345' target: "#{tmpdir}/check_md5" @@ -209,10 +209,10 @@ describe 'file.download url', -> $tmpdir: true $ssh: ssh , ({metadata: {tmpdir}}) -> - @file.download + await @file.download source: 'http://localhost:12345' target: "#{tmpdir}/check_md5" - @file.download + await @file.download md5: 'df8fede7ff71608e24a5576326e41c75' source: 'http://localhost:12345' target: "#{tmpdir}/check_md5" @@ -226,7 +226,7 @@ describe 'file.download url', -> $tmpdir: true $ssh: ssh , ({metadata: {tmpdir}}) -> - @file.download + await @file.download source: "http://localhost/sth" target: "a_dir/download_test" .should.be.rejectedWith message: 'Non Absolute Path: target is "a_dir/download_test", SSH requires absolute paths, you must provide an absolute path in the target or the cwd option' diff --git a/packages/file/test/index.coffee b/packages/file/test/index.coffee index 191ab6e1f..2e9f57776 100644 --- a/packages/file/test/index.coffee +++ b/packages/file/test/index.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file', -> + return unless test.tags.posix describe 'schema and validation', -> @@ -21,7 +21,7 @@ describe 'file', -> target: "#{tmpdir}/check_replace" match: 'b' replace: '' - @file + await @file target: "#{tmpdir}/check_replace" match: 'b' replace: null @@ -35,11 +35,11 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'Hello' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Hello' @@ -48,10 +48,10 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: ({config}) -> 'Hello' - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Hello' @@ -60,15 +60,15 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'original content' .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/file" content: 'new content' .should.be.finally.containEql 
$status: true - @file + await @file target: "#{tmpdir}/file" content: 'new content' .should.be.finally.containEql $status: false @@ -78,21 +78,21 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_source" content: 'Hello' - @file + await @file target: "#{tmpdir}/a_target" source: "#{tmpdir}/a_source" .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/a_target" source: "#{tmpdir}/a_source" .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/a_source" content: 'Hello' - @fs.assert + await @fs.assert target: "#{tmpdir}/a_target" content: 'Hello' @@ -101,11 +101,11 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: '' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: '' @@ -114,18 +114,18 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/empty_file" content: '' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/empty_file" content: '' - @file + await @file target: "#{tmpdir}/empty_file" content: 'toto' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/empty_file" content: 'toto' @@ -134,11 +134,11 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 123 .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: '123' @@ -147,11 +147,11 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a/missing/dir/a_file" content: 'hello' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/a/missing/dir/a_file" content: 'hello' @@ -160,12 +160,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a/missing/dir/a_file" content: 'hello\r\nworld' remove_empty_lines: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/a/missing/dir/a_file" content: 'hello\rworld' @@ -176,19 +176,19 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file content: 'ko' target: "#{tmpdir}/target" - @fs.link + await @fs.link source: "#{tmpdir}/target" target: "#{tmpdir}/link" - @file + await @file content: 'ok' target: "#{tmpdir}/link" - @fs.assert + await @fs.assert target: "#{tmpdir}/target" content: 'ok' - @fs.assert + await @fs.assert target: "#{tmpdir}/link" content: 'ok' @@ -197,12 +197,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @fs.mkdir + await @fs.mkdir target: "#{tmpdir}/target" - @fs.link + await @fs.link source: "#{tmpdir}/target" target: "#{tmpdir}/link" - @file + await @file content: 'ok' target: "#{tmpdir}/link" .should.be.rejectedWith code: 'NIKITA_FS_CRS_TARGET_EISDIR' @@ -212,20 +212,20 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file content: 'ko' target: "#{tmpdir}/a_target" - @fs.link + await @fs.link source: "#{tmpdir}/a_target" target: "#{tmpdir}/a_link" - @file + await @file content: 'ok' target: "#{tmpdir}/a_link" unlink: true - @fs.assert + await @fs.assert target: "#{tmpdir}/a_target" content: 'ko' - 
@fs.assert + await @fs.assert target: "#{tmpdir}/a_link" content: 'ok' @@ -234,20 +234,20 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @fs.mkdir + await @fs.mkdir target: "#{tmpdir}/target" - @fs.link + await @fs.link source: "#{tmpdir}/target" target: "#{tmpdir}/link" - @file + await @file content: 'ok' target: "#{tmpdir}/link" unlink: true - @fs.assert + await @fs.assert target: "#{tmpdir}/link" content: 'ok' filetype: 'file' - @fs.assert + await @fs.assert target: "#{tmpdir}/target" filetype: 'directory' @@ -258,11 +258,11 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'ok' mode: 0o0700 - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" mode: 0o0700 @@ -271,14 +271,14 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @fs.base.mkdir + await @fs.base.mkdir target: "#{tmpdir}/a_dir" mode: 0o0744 - @file + await @file target: "#{tmpdir}/a_file" content: 'ok' mode: 0o0700 - @fs.assert + await @fs.assert target: "#{tmpdir}/a_dir" mode: 0o0744 @@ -287,14 +287,14 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' mode: 0o0755 - @file + await @file target: "#{tmpdir}/file" content: "hello nikita" - @fs.assert + await @fs.assert target: "#{tmpdir}/file" mode: 0o0755 @@ -303,16 +303,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'ok' mode: 0o0700 - @file + await @file target: "#{tmpdir}/a_file" content: 'ok' mode: 0o0705 .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/a_file" content: 'ok' mode: 0o0705 @@ -323,15 +323,15 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'Hello' mode: 0o0700 - @file + await @file target: "#{tmpdir}/a_file" content: 'World' mode: 0o0755 - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" mode: 0o0755 @@ -342,7 +342,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file $ssh: ssh target: "#{tmpdir}/fromto.md" from: '# from' @@ -350,7 +350,7 @@ describe 'file', -> content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin' replace: 'my friend' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\n# from\nmy friend\n# to\nyou coquin' @@ -359,20 +359,20 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" content: 'here we are\nyou coquin' - @file + await @file target: "#{tmpdir}/fromto.md" from: '# from' to: '# to' append: true replace: 'my friend' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\nyou coquin\n# from\nmy friend\n# to' - @file + await @file target: "#{tmpdir}/fromto.md" from: '# from' to: '# to' @@ -380,7 +380,7 @@ describe 'file', -> replace: 'my best friend' eof: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\nyou coquin\n# from\nmy best friend\n# to\n' @@ -389,13 +389,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" from: '# from' content: 'here we 
are\n# from\nlets try to replace that one\n# to\nyou coquin' replace: 'my friend' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\n# from\nmy friend' @@ -404,13 +404,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" to: '# to' content: 'here we are\n# from\nlets try to replace that one\n# to\nyou coquin' replace: 'my friend' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" describe 'config `replace`', -> @@ -420,12 +420,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" content: 'here we are\nyou+coquin' replace: 'my friend' place_before: 'you+coquin' # Regexp must escape the plus sign - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\nmy friend\nyou+coquin' @@ -434,12 +434,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" content: 'here we are\nyou coquin' replace: 'my friend' place_before: /^you coquin$/m - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\nmy friend\nyou coquin' @@ -450,18 +450,18 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" match: 'lets try to replace that one' content: 'here we are\nlets try to replace that one\nyou coquin' replace: 'my friend' .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/fromto.md" match: 'my friend' replace: 'my friend' .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'here we are\nmy friend\nyou coquin' @@ -470,13 +470,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/fromto.md" match: 'replace' content: 'replace that one\nand\nreplace this one\nand not this one' replace: 'switch' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/fromto.md" content: 'switch that one\nand\nswitch this one\nand not this one' @@ -486,18 +486,18 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/replace" content: 'email=david(at)adaltas(dot)com\nusername=root' match: /(username)=(.*)/ replace: '$1=david (was $2)' .should.be.finally.containEql $status: true - @file # Without a match + await @file # Without a match target: "#{tmpdir}/replace" match: /this wont work/ replace: '$1=david (was $2)' .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/replace" content: 'email=david(at)adaltas(dot)com\nusername=david (was root)' @@ -506,13 +506,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file match: /(.*try) (.*)/ content: 'here we are\nlets try to replace that one\nyou coquin' replace: ['my friend, $1'] target: "#{tmpdir}/replace" .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/replace" content: 'here we are\nmy friend, lets try\nyou coquin' @@ -521,7 +521,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file append: true content: 'aaa\nmatch\nccc\nmatch' eof: true @@ -529,7 +529,7 @@ describe 
'file', -> replace: '' target: "#{tmpdir}/replace" .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/replace" content: 'aaa\nccc\n' @@ -538,18 +538,18 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/replace" match: /^property=.*$/mg content: '#A config file\n#property=30\nproperty=10\nproperty=20\n#End of Config' replace: 'property=50' .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/replace" match: /^property=50$/mg replace: 'property=50' .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/replace" content: '#A config file\n#property=30\nproperty=50\nproperty=50\n#End of Config' @@ -558,20 +558,20 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'This is\nsome content\nfor testing' - @file + await @file target: "#{tmpdir}/a_file" match: /(.*content)/ replace: 'a text' .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/a_file" match: /(.*content)/ replace: 'a text' .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" content: 'This is\na text\nfor testing' @@ -583,15 +583,15 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'world' place_before: true - @file # File exists, prepends to it + await @file # File exists, prepends to it target: "#{tmpdir}/a_file" replace: 'hello' place_before: true - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" content: 'hello\nworld' @@ -603,11 +603,11 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'hello' append: true - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" content: 'hello' @@ -617,15 +617,15 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'hello' append: true - @file # File exists, it append to it + await @file # File exists, it append to it target: "#{tmpdir}/a_file" content: 'world' append: true - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" content: 'helloworld' @@ -639,26 +639,26 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'you coquin\nhere we are\n' - @file + await @file target: "#{tmpdir}/file" match: /.*coquin/ replace: 'new coquin' place_before: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'new coquin\nhere we are\n' # Write a second time with same match - @file + await @file target: "#{tmpdir}/file" match: /.*coquin/ replace: 'new coquin' place_before: true .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'new coquin\nhere we are\n' @@ -667,26 +667,26 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'you coquin\nhere we are\n' - @file + await @file target: "#{tmpdir}/file" match: "you coquin" replace: 'new coquin' place_before: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'new coquin\nhere we are\n' # Write a second time with same match - 
@file + await @file target: "#{tmpdir}/file" match: "new coquin" replace: 'new coquin' place_before: true .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'new coquin\nhere we are\n' @@ -696,26 +696,26 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\n' - @file + await @file target: "#{tmpdir}/file" match: /.*coquin/ replace: 'new coquin' append: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nnew coquin\n' # Write a second time with same match - @file + await @file target: "#{tmpdir}/file" match: /.*coquin/ replace: 'new coquin' append: true .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nnew coquin\n' @@ -725,16 +725,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\n' - @file + await @file target: "#{tmpdir}/file" match: /will never work/ replace: 'Add this line' append: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nAdd this line' @@ -746,16 +746,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nshould we\nhave fun' - @file + await @file target: "#{tmpdir}/file" match: /will never work/ replace: 'Add this line' place_before: /^.*we.*$/m .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Add this line\nhere we are\nyou coquin\nshould we\nhave fun' @@ -765,16 +765,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nshould we\nhave fun' - @file + await @file target: "#{tmpdir}/file" match: /will never work/ replace: 'Add this line' append: /^.*we.*$/m .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nAdd this line\nyou coquin\nshould we\nhave fun' @@ -785,16 +785,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nshould we\nhave fun' - @file + await @file target: "#{tmpdir}/file" match: /will never work/ replace: 'Add this line' place_before: /^.*we.*$/gm .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Add this line\nhere we are\nyou coquin\nAdd this line\nshould we\nhave fun' @@ -804,16 +804,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nshould we\nhave fun' - @file + await @file target: "#{tmpdir}/file" match: /will never work/ replace: 'Add this line' append: /^.*we.*$/gm .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun' @@ -822,16 +822,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nshould 
we\nhave fun' - @file + await @file target: "#{tmpdir}/file" match: /will never work/ replace: 'Add this line' append: 'we' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nAdd this line\nyou coquin\nshould we\nAdd this line\nhave fun' @@ -842,16 +842,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin' - @file + await @file target: "#{tmpdir}/file" match: /will never be found/ replace: 'Add this line' place_before: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Add this line\nhere we are\nyou coquin' @@ -860,16 +860,16 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'here we are\nyou coquin' - @file + await @file target: "#{tmpdir}/file" match: /will never be found/ replace: 'Add this line' append: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'here we are\nyou coquin\nAdd this line' @@ -880,13 +880,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" match: /will never be found/ replace: 'Add this line' place_before: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Add this line' @@ -895,13 +895,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" match: /will never be found/ replace: 'Add this line' append: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Add this line' @@ -910,27 +910,27 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/a_file" content: 'Here we are\nyou coquin' - @file + await @file target: "#{tmpdir}/a_file" replace: 'Add this line' append: true .should.be.finally.containEql $status: true - @file + await @file target: "#{tmpdir}/a_file" replace: 'Add this line' append: true .should.be.finally.containEql $status: false - @file + await @file target: "#{tmpdir}/a_file" write: [ replace: 'Add this line' append: true ] .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file" content: 'Here we are\nyou coquin\nAdd this line' @@ -941,23 +941,23 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'Hello' - @file + await @file target: "#{tmpdir}/file" content: 'Hello' backup: '.bck' .should.be.finally.containEql $status: false - @fs.assert + await @fs.assert target: "#{tmpdir}/file.bck" not: true - @file + await @file target: "#{tmpdir}/file" content: 'Hello Node' backup: '.bck' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file.bck" content: 'Hello' @@ -966,12 +966,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'Hello' backup: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'Hello' @@ -980,27 +980,27 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: 
"#{tmpdir}/new_file_perm" content: 'Hello World' - @file + await @file target: "#{tmpdir}/new_file_perm" content: 'Hello' mode: 0o0644 backup: '.bck1' .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/new_file_perm.bck1" content: 'Hello World' mode: 0o0400 - @file + await @file target: "#{tmpdir}/new_file_perm" content: 'Hello World' backup: '.bck2' mode: 0o0644 backup_mode: 0o0640 .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/new_file_perm.bck2" content: 'Hello' mode: 0o0640 @@ -1012,10 +1012,10 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'username: me\nemail: my@email\nfriends: you' - @file + await @file target: "#{tmpdir}/file" write: [ match: /^(username).*$/m @@ -1028,7 +1028,7 @@ describe 'file', -> replace: "$1: me" ] .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'username: you\n\nfriends: me' @@ -1037,10 +1037,10 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'username: me\nfriends: you' - @file + await @file target: "#{tmpdir}/file" write: [ match: /^(username).*$/m @@ -1054,7 +1054,7 @@ describe 'file', -> replace: "$1: me" ] .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'username: you\nemail: your@email\nfriends: me' @@ -1063,10 +1063,10 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'username: me\nfriends: none' - @file + await @file target: "#{tmpdir}/file" write: [ match: /^will never match$/m @@ -1080,7 +1080,7 @@ describe 'file', -> replace: "$1: you" ] .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'username: me\nemail: my@email\nfriends: you' @@ -1091,7 +1091,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: 'abc' source: 'abc' content: 'abc' @@ -1102,7 +1102,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" source: "#{tmpdir}/does/not/exists" .should.be.rejectedWith message: "Source does not exist: \"#{tmpdir}/does/not/exists\"" @@ -1112,7 +1112,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" source: "#{tmpdir}/does/not/exists" local: true @@ -1125,12 +1125,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'this is\r\nsome content' eof: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'this is\r\nsome content\r\n' @@ -1139,13 +1139,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file $ssh: ssh target: "#{tmpdir}/file" content: 'this is some content' eof: true .should.be.finally.containEql $status: true - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'this is some content\n' @@ -1156,7 +1156,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> @@ -1168,12 +1168,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , 
({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> "#{config.content} world" - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'hello world' @@ -1182,7 +1182,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: -> @@ -1195,13 +1195,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: -> new Promise (resolve, reject) -> resolve('hello world') - @fs.assert + await @fs.assert target: "#{tmpdir}/file" content: 'hello world' @@ -1210,7 +1210,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: -> @@ -1223,13 +1223,13 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: -> new Promise (resolve, reject) -> reject() - @fs.assert + await @fs.assert target: "#{tmpdir}/file" not: true @@ -1238,7 +1238,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> @@ -1250,12 +1250,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> null - @fs.assert + await @fs.assert target: "#{tmpdir}/file" not: true @@ -1264,7 +1264,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> @@ -1276,12 +1276,12 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> undefined - @fs.assert + await @fs.assert target: "#{tmpdir}/file" not: true @@ -1290,7 +1290,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file target: "#{tmpdir}/file" content: 'hello' transform: ({config}) -> @@ -1304,7 +1304,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file content: 'hello' target: ({content: content}) -> throw Error content @@ -1315,7 +1315,7 @@ describe 'file', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file + await @file content: 'hello' eof: true target: ({content: content}) -> diff --git a/packages/file/test/index.diff.coffee b/packages/file/test/index.diff.coffee index 95d392ba2..e6bed17a6 100644 --- a/packages/file/test/index.diff.coffee +++ b/packages/file/test/index.diff.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file config diff', -> + return unless test.tags.posix they 'type is a function', ({ssh}) -> diffcalled = false diff --git a/packages/file/test/ini/index.coffee b/packages/file/test/ini/index.coffee index 83cbf6f3f..e4b0836fa 100644 --- a/packages/file/test/ini/index.coffee +++ b/packages/file/test/ini/index.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - 
-return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.ini', -> + return unless test.tags.posix they 'stringify an object', ({ssh}) -> nikita diff --git a/packages/file/test/ini/index.stringify_brackets_then_curly.coffee b/packages/file/test/ini/index.stringify_brackets_then_curly.coffee index 00f40fed6..aaa418d95 100644 --- a/packages/file/test/ini/index.stringify_brackets_then_curly.coffee +++ b/packages/file/test/ini/index.stringify_brackets_then_curly.coffee @@ -1,12 +1,12 @@ -nikita = require '@nikitajs/core/lib' -utils = require '../../lib/utils' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/file/utils' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.ini option stringify_brackets_then_curly', -> + return unless test.tags.posix # TODO: move to `utils.ini` tests diff --git a/packages/file/test/ini/index.stringify_single_key.coffee b/packages/file/test/ini/index.stringify_single_key.coffee index 6a137b3bb..0e14dbf55 100644 --- a/packages/file/test/ini/index.stringify_single_key.coffee +++ b/packages/file/test/ini/index.stringify_single_key.coffee @@ -1,12 +1,12 @@ -nikita = require '@nikitajs/core/lib' -utils = require '../../lib/utils' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/file/utils' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.ini option stringify_single_key', -> + return unless test.tags.posix # TODO: move to `utils.ini` tests diff --git a/packages/file/test/ini/read.coffee b/packages/file/test/ini/read.coffee index 2b1a12208..b7290321d 100644 --- a/packages/file/test/ini/read.coffee +++ b/packages/file/test/ini/read.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.ini.read', -> + return unless test.tags.posix they 'parse to an object', ({ssh}) -> nikita diff --git a/packages/file/test/json.coffee b/packages/file/test/json.coffee index d23892046..630d995c0 100644 --- a/packages/file/test/json.coffee +++ b/packages/file/test/json.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.json', -> + return unless test.tags.posix they 'stringify content to target', ({ssh}) -> nikita diff --git a/packages/file/test/properties/index.coffee b/packages/file/test/properties/index.coffee index d932a0056..53711e32a 100644 --- a/packages/file/test/properties/index.coffee +++ b/packages/file/test/properties/index.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey 
from 'mocha-they' +they = mochaThey(test.config) describe 'file.properties', -> + return unless test.tags.posix they 'overwrite by default', ({ssh}) -> nikita diff --git a/packages/file/test/properties/read.coffee b/packages/file/test/properties/read.coffee index 52b7f3d79..d237eab58 100644 --- a/packages/file/test/properties/read.coffee +++ b/packages/file/test/properties/read.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.properties.read', -> + return unless test.tags.posix they 'read single key', ({ssh}) -> nikita diff --git a/packages/file/test/render.coffee b/packages/file/test/render.coffee index b8d591b3c..98bc1bcdd 100644 --- a/packages/file/test/render.coffee +++ b/packages/file/test/render.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.render', -> + return unless test.tags.posix describe 'error', -> diff --git a/packages/file/test/test.coffee b/packages/file/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/file/test/test.coffee +++ b/packages/file/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/file/test/touch.coffee b/packages/file/test/touch.coffee index c5cf5dd5b..c52c7edae 100644 --- a/packages/file/test/touch.coffee +++ b/packages/file/test/touch.coffee @@ -1,12 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.touch', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'inheric file schema', -> nikita.file.touch @@ -20,7 +21,7 @@ describe 'file.touch', -> config.target.should.eql '/tmp/fake' describe 'usage', -> - return unless tags.posix + return unless test.tags.posix they 'as a target option', ({ssh}) -> nikita diff --git a/packages/file/test/types/ceph_conf.coffee 
b/packages/file/test/types/ceph_conf.coffee index 97d6b5301..50a6774ee 100644 --- a/packages/file/test/types/ceph_conf.coffee +++ b/packages/file/test/types/ceph_conf.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.ceph_conf', -> + return unless test.tags.posix they 'generate from content', ({ssh}) -> nikita diff --git a/packages/file/test/types/hfile.coffee b/packages/file/test/types/hfile.coffee index 3b00a78fa..b9e996803 100644 --- a/packages/file/test/types/hfile.coffee +++ b/packages/file/test/types/hfile.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.hfile', -> + return unless test.tags.posix they 'without properties', ({ssh}) -> nikita diff --git a/packages/file/test/types/krb5_conf.coffee b/packages/file/test/types/krb5_conf.coffee index 7635e50ad..eab6c264b 100644 --- a/packages/file/test/types/krb5_conf.coffee +++ b/packages/file/test/types/krb5_conf.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.krb5_conf', -> + return unless test.tags.posix they 'write content (default MIT Kerberos file)', ({ssh}) -> nikita diff --git a/packages/file/test/types/locale_gen.coffee b/packages/file/test/types/locale_gen.coffee index db0397f19..e13069495 100644 --- a/packages/file/test/types/locale_gen.coffee +++ b/packages/file/test/types/locale_gen.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.locale_gen', -> + return unless test.tags.posix they 'activate locales', ({ssh}) -> nikita diff --git a/packages/file/test/types/my_cnf.coffee b/packages/file/test/types/my_cnf.coffee index 9b6038398..ac66549c0 100644 --- a/packages/file/test/types/my_cnf.coffee +++ b/packages/file/test/types/my_cnf.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.my_cnf', -> + return unless test.tags.posix they 'generate from content', ({ssh}) -> nikita diff --git a/packages/file/test/types/pacman_conf.coffee b/packages/file/test/types/pacman_conf.coffee index b39224397..6a38e7de9 100644 --- a/packages/file/test/types/pacman_conf.coffee +++ b/packages/file/test/types/pacman_conf.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita 
from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.pacman_conf', -> + return unless test.tags.posix they 'empty values dont print values', ({ssh}) -> nikita diff --git a/packages/file/test/types/ssh_authorized_keys.coffee b/packages/file/test/types/ssh_authorized_keys.coffee index 28277e4f8..b53ce592a 100644 --- a/packages/file/test/types/ssh_authorized_keys.coffee +++ b/packages/file/test/types/ssh_authorized_keys.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.ssh_authorized_keys', -> + return unless test.tags.posix they 'overwrite file', ({ssh}) -> nikita diff --git a/packages/file/test/types/systemd/resolved.coffee b/packages/file/test/types/systemd/resolved.coffee index db35d9202..6a169dcf2 100644 --- a/packages/file/test/types/systemd/resolved.coffee +++ b/packages/file/test/types/systemd/resolved.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.systemd.resolved', -> + return unless test.tags.posix they 'servers as a string', ({ssh}) -> nikita diff --git a/packages/file/test/types/systemd/timesyncd.coffee b/packages/file/test/types/systemd/timesyncd.coffee index 6693f1c03..abddcdb30 100644 --- a/packages/file/test/types/systemd/timesyncd.coffee +++ b/packages/file/test/types/systemd/timesyncd.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.systemd.timesyncd', -> + return unless test.tags.posix they 'servers as a string', ({ssh}) -> nikita diff --git a/packages/file/test/types/test.coffee b/packages/file/test/types/test.coffee deleted file mode 100644 index f17f2b1bb..000000000 --- a/packages/file/test/types/test.coffee +++ /dev/null @@ -1,14 +0,0 @@ - -fs = require 'fs' -# Write default configuration -if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") -) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config -# Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" -# Export configuration -module.exports = config diff --git a/packages/file/test/types/wireguard_conf.coffee b/packages/file/test/types/wireguard_conf.coffee index 1cbda5f86..f6f749866 100644 --- a/packages/file/test/types/wireguard_conf.coffee +++ b/packages/file/test/types/wireguard_conf.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 
'mocha-they' +they = mochaThey(test.config) describe 'file.types.wireguard_conf', -> + return unless test.tags.posix they 'simple values', ({ssh}) -> nikita diff --git a/packages/file/test/types/yum_repo.coffee b/packages/file/test/types/yum_repo.coffee index 2a4484729..18320a208 100644 --- a/packages/file/test/types/yum_repo.coffee +++ b/packages/file/test/types/yum_repo.coffee @@ -1,11 +1,12 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/file/utils' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.types.yum_repo', -> + return unless test.tags.posix they 'generate from content object', ({ssh}) -> nikita @@ -32,7 +33,7 @@ describe 'file.types.yum_repo', -> 'gpgcheck': '1' 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7' $status.should.be.false() - @fs.assert + await @fs.assert target: "#{tmpdir}/test.repo" they 'merge with content object', ({ssh}) -> @@ -58,7 +59,7 @@ describe 'file.types.yum_repo', -> 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7' merge: true $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/test.repo" they 'write to default repository dir', ({ssh}) -> @@ -74,7 +75,7 @@ describe 'file.types.yum_repo', -> 'mirrorlist': 'http://test/?infra=$infra' 'baseurl': 'http://mirror.centos.org' $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/test.repo" content: """ [test-repo-0.0.3] @@ -89,10 +90,17 @@ describe 'file.types.yum_repo', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @file.types.yum_repo + target: "#{tmpdir}/original.repo" + content: + 'test-repo-0.0.3': + 'name': 'CentOS' + 'mirrorlist': 'http://test/?infra=$infra' + 'baseurl': 'http://mirror.centos.org' {$status} = await @file.types.yum_repo - target: "#{tmpdir}/CentOS-nikita.repo" - source: "#{__dirname}/../resources/CentOS-nikita.repo" local: true + source: "#{tmpdir}/original.repo" + target: "#{tmpdir}/new.repo" content: "test-repo-0.0.4": 'name': 'CentOS-$releasever - Base' @@ -101,5 +109,9 @@ describe 'file.types.yum_repo', -> 'gpgcheck': '1' 'gpgkey': 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7' $status.should.be.true() - @fs.assert - target: "#{tmpdir}/CentOS-nikita.repo" + await @fs.assert + target: "#{tmpdir}/new.repo" + {data} = await @file.ini.read + parse: utils.ini.parse_multi_brackets, + target: "#{tmpdir}/new.repo" + Object.keys(data).should.eql [ 'test-repo-0.0.3', 'test-repo-0.0.4' ] diff --git a/packages/file/test/upload.coffee b/packages/file/test/upload.coffee index f77e71462..0a092255b 100644 --- a/packages/file/test/upload.coffee +++ b/packages/file/test/upload.coffee @@ -1,53 +1,51 @@ -path = require 'path' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import path from 'node:path' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.upload', -> + return unless test.tags.posix they 'source is missing', ({ssh}) -> nikita $ssh: ssh - $tmpdir: true - , ({metadata: {tmpdir}}) -> - @file.upload - target: "#{tmpdir}/#{path.basename __filename}" - .should.be.rejectedWith [ - 'NIKITA_SCHEMA_VALIDATION_CONFIG:' - 'one error was found in the configuration of action `file.upload`:' - '#/required 
config must have required property \'source\'.' - ].join ' ' + .file.upload + target: "a_dir/a_file" + .should.be.rejectedWith [ + 'NIKITA_SCHEMA_VALIDATION_CONFIG:' + 'one error was found in the configuration of action `file.upload`:' + '#/required config must have required property \'source\'.' + ].join ' ' they 'target is missing', ({ssh}) -> nikita $ssh: ssh - $tmpdir: true - , ({metadata: {tmpdir}}) -> - @file.upload - source: "#{__filename}" - .should.be.rejectedWith [ - 'NIKITA_SCHEMA_VALIDATION_CONFIG:' - 'one error was found in the configuration of action `file.upload`:' - '#/required config must have required property \'target\'.' - ].join ' ' + .file.upload + source: "a_dir/a_file" + .should.be.rejectedWith [ + 'NIKITA_SCHEMA_VALIDATION_CONFIG:' + 'one error was found in the configuration of action `file.upload`:' + '#/required config must have required property \'target\'.' + ].join ' ' they 'file into a file', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.upload - source: "#{__filename}" - target: "#{tmpdir}/#{path.basename __filename}" + await @file.touch "#{tmpdir}/a_file" + await @fs.mkdir "#{tmpdir}/target_dir" + await @file.upload + source: "#{tmpdir}/a_file" + target: "#{tmpdir}/target_dir/a_file" .should.be.finally.containEql $status: true - @fs.assert - target: "#{tmpdir}/#{path.basename __filename}" - @file.upload - source: "#{__filename}" - target: "#{tmpdir}/#{path.basename __filename}" + await @fs.assert + target: "#{tmpdir}/target_dir/a_file" + await @file.upload + source: "#{tmpdir}/a_file" + target: "#{tmpdir}/target_dir/a_file" .should.be.finally.containEql $status: false they 'file into a directory', ({ssh}) -> @@ -55,13 +53,15 @@ describe 'file.upload', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @file.upload - source: "#{__filename}" - target: "#{tmpdir}" + await @file.touch "#{tmpdir}/a_file" + await @fs.mkdir "#{tmpdir}/target_dir" + await @file.upload + source: "#{tmpdir}/a_file" + target: "#{tmpdir}/target_dir" .should.be.finally.containEql $status: true - @fs.assert - target: "#{tmpdir}/#{path.basename __filename}" - @file.upload - source: "#{__filename}" - target: "#{tmpdir}" + await @fs.assert + target: "#{tmpdir}/target_dir/a_file" + await @file.upload + source: "#{tmpdir}/a_file" + target: "#{tmpdir}/target_dir" .should.be.finally.containEql $status: false diff --git a/packages/file/test/utils/ini/parse.coffee b/packages/file/test/utils/ini/parse.coffee index bc39c9245..098effa77 100644 --- a/packages/file/test/utils/ini/parse.coffee +++ b/packages/file/test/utils/ini/parse.coffee @@ -1,11 +1,10 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' should.config.checkProtoEql = false -return unless tags.api - describe 'utils.ini.parse', -> + return unless test.tags.api it 'parse depth 1', -> data = ini.parse """ diff --git a/packages/file/test/utils/ini/parse_brackets_then_curly.coffee b/packages/file/test/utils/ini/parse_brackets_then_curly.coffee index d5bd6da0e..96bc667ab 100644 --- a/packages/file/test/utils/ini/parse_brackets_then_curly.coffee +++ b/packages/file/test/utils/ini/parse_brackets_then_curly.coffee @@ -1,10 +1,9 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' - -return unless tags.api +import { ini } from '@nikitajs/file/utils' +import test from '../../test.coffee' describe 'utils.ini.parse_brackets_then_curly', -> + return unless test.tags.api it 'parse braket, no 
values', -> ini.parse_brackets_then_curly """ diff --git a/packages/file/test/utils/ini/parse_multi_brackets.coffee b/packages/file/test/utils/ini/parse_multi_brackets.coffee index f5723243c..c40ba794f 100644 --- a/packages/file/test/utils/ini/parse_multi_brackets.coffee +++ b/packages/file/test/utils/ini/parse_multi_brackets.coffee @@ -1,10 +1,9 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' - -return unless tags.api +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' describe 'utils.ini.parse_multi_brackets', -> + return unless test.tags.api describe 'multi brackets', -> diff --git a/packages/file/test/utils/ini/parse_multi_brackets_multi_lines.coffee b/packages/file/test/utils/ini/parse_multi_brackets_multi_lines.coffee index fb1cb20cc..eaa33723d 100644 --- a/packages/file/test/utils/ini/parse_multi_brackets_multi_lines.coffee +++ b/packages/file/test/utils/ini/parse_multi_brackets_multi_lines.coffee @@ -1,10 +1,9 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' - -return unless tags.api +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' describe 'utils.ini.parse_multi_brackets_multi_lines', -> + return unless test.tags.api it 'parse', -> ini.parse_multi_brackets_multi_lines """ diff --git a/packages/file/test/utils/ini/split_by_dots.coffee b/packages/file/test/utils/ini/split_by_dots.coffee index bee8ba639..91382ff86 100644 --- a/packages/file/test/utils/ini/split_by_dots.coffee +++ b/packages/file/test/utils/ini/split_by_dots.coffee @@ -1,11 +1,10 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' should.config.checkProtoEql = false -return unless tags.api - describe 'utils.ini.split_by_dots', -> + return unless test.tags.api it 'several dot', -> splits = ini.split_by_dots('a.bb.ddd').should.eql [ diff --git a/packages/file/test/utils/ini/stringify.coffee b/packages/file/test/utils/ini/stringify.coffee index 444645cad..7e501e2b4 100644 --- a/packages/file/test/utils/ini/stringify.coffee +++ b/packages/file/test/utils/ini/stringify.coffee @@ -1,10 +1,9 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' - -return unless tags.api +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' describe 'utils.ini.stringify', -> + return unless test.tags.api it 'honors option separator', -> ini.stringify diff --git a/packages/file/test/utils/ini/stringify_brackets_then_curly.coffee b/packages/file/test/utils/ini/stringify_brackets_then_curly.coffee index dd0d9b8e2..a5cb1bd4a 100644 --- a/packages/file/test/utils/ini/stringify_brackets_then_curly.coffee +++ b/packages/file/test/utils/ini/stringify_brackets_then_curly.coffee @@ -1,10 +1,9 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' - -return unless tags.api +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' describe 'utils.ini.stringify_brackets_then_curly', -> + return unless test.tags.api it 'option eol', -> ini.stringify_brackets_then_curly diff --git a/packages/file/test/utils/ini/stringify_multi_brackets.coffee b/packages/file/test/utils/ini/stringify_multi_brackets.coffee index 811a6318c..c0e69d090 100644 --- a/packages/file/test/utils/ini/stringify_multi_brackets.coffee +++ b/packages/file/test/utils/ini/stringify_multi_brackets.coffee @@ -1,10 +1,9 @@ -{ini} = require '../../../lib/utils' -{tags} = require '../../test' - -return unless 
tags.api +import {ini} from '@nikitajs/file/utils' +import test from '../../test.coffee' describe 'utils.ini.stringify_multi_brackets', -> + return unless test.tags.api it 'stringify test eol', -> res = ini.stringify_multi_brackets diff --git a/packages/file/test/yaml.coffee b/packages/file/test/yaml.coffee index 105f5ac26..a308a5a87 100644 --- a/packages/file/test/yaml.coffee +++ b/packages/file/test/yaml.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'file.yaml', -> + return unless test.tags.posix they 'stringify an object', ({ssh}) -> nikita diff --git a/packages/ipa/README.md b/packages/ipa/README.md index 6a1a36eb2..27e7a676d 100644 --- a/packages/ipa/README.md +++ b/packages/ipa/README.md @@ -3,23 +3,26 @@ The "ipa" package provides Nikita actions for various FreeIPA operations. -## Running the test - -The tests require a local LXD client. On a Linux hosts, you can follow the [installation instructions](https://linuxcontainers.org/lxd/getting-started-cli/). On non Linux hosts, you can setup the client to communicate with a remote LXD server hosted on a virtual machine. You will however have to mount the project directory into the "/nikita" folder of the virtual machine. The provided Vagrantfile definition inside "@nikitajs/core/env/cluster/assets" will set you up. - -``` -# For windows and osx user -../lxd/bin/cluster start -export NIKITA_HOME=/nikita -# Start the server -coffee ./env/ipa/start.coffee -# Run all the tests -lxc exec freeipa --cwd /nikita/packages/ipa npm test -# Run selected tests -lxc exec freeipa --cwd /nikita/packages/ipa npx mocha test/user/exists.coffee -# Enter the IPA container -lxc exec freeipa --cwd /nikita/packages/ipa bash -npm test +## Usage + +```js +import "@nikitajs/ipa/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.ipa.user({ + uid: "my_username", + attributes: { + givenname: "My Firstname", + sn: "My Lastname", + mail: "my_username@nikita.js.org", + }, + connection: { + "principal": "admin", + "password": "admin_pw", + "url": "https://ipa.nikita.local/ipa/session/json", + }, +}); +console.info("User was modified:", $status); ``` ## Notes @@ -30,7 +33,7 @@ npm test ``` [1/29]: configuring certificate server instance -[error] IOError: [Errno 13] Permission denied: '/tmp/tmp_Tm1l_' +[error] IOError: [Errno 13] Permission denied: "/tmp/tmp_Tm1l_" ``` -Host must have `fs.protected_regular` set to `0`, eg `echo '0' > /proc/sys/fs/protected_regular && sysctl -p && sysctl -a`. In our Physical -> VM -> LXD setup, the parameters shall be set in the VM, no restart is required to install the FreeIPA server, just uninstall it first with `ipa-server-install --uninstall` before re-executing the install command. +Host must have `fs.protected_regular` set to `0`, eg `echo "0" > /proc/sys/fs/protected_regular && sysctl -p && sysctl -a`. In our Physical -> VM -> LXD setup, the parameters shall be set in the VM, no restart is required to install the FreeIPA server, just uninstall it first with `ipa-server-install --uninstall` before re-executing the install command. 
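As a hedged companion to the usage example above, the sketch below checks whether an account exists before modifying it. It is a sketch only: it assumes `ipa.user.exists` accepts the same `uid` option as `nikita.ipa.user` and reuses the same connection settings; the `$status` boolean it destructures is the same one the `user.del` handler in this patch relies on.

```js
import "@nikitajs/ipa/register";
import nikita from "@nikitajs/core";

// Assumption: `uid` mirrors the `nikita.ipa.user` usage example above.
const { $status: exists } = await nikita.ipa.user.exists({
  uid: "my_username",
  connection: {
    principal: "admin",
    password: "admin_pw",
    url: "https://ipa.nikita.local/ipa/session/json",
  },
});
console.info("User exists:", exists);
```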
diff --git a/packages/ipa/env/ipa/index.coffee b/packages/ipa/env/ipa/index.coffee deleted file mode 100644 index 1635b2aba..000000000 --- a/packages/ipa/env/ipa/index.coffee +++ /dev/null @@ -1,145 +0,0 @@ - -path = require 'path' -runner = require '@nikitajs/lxd-runner' - -# Note: - -# Jan 20th, 2020: upgrading ubuntu to 19.10 on the host vm -# lead to an error while installing freeipa -# complaining that it cannot write into /tmp -# solution involve to run on the host machine -# check: `cat /proc/sys/fs/protected_regular` -# Temporary: `echo '0' > /proc/sys/fs/protected_regular && sysctl -p` -# Permanently: `echo 'fs.protected_regular = 0' >> /etc/sysctl.conf && sysctl -p` - -# Error starting IPA -# command: -# ipactl start -# Starting Directory Service -# Failed to start Directory Service: Command '/bin/systemctl start dirsrv@NIKITA-LOCAL.service' returned non-zero exit status 1 -# Solution: -# Check available space on host machine - -# When adding principal, got Error -# Host 'ipa.nikita.local' does not have corresponding DNS A/AAAA record -# Short term solution: -# Reset the DNS server in resolv.conf with the IPA DNS -# echo 'search nikita.local' > /etc/resolv.conf -# echo 'nameserver 10.10.11.2' >> /etc/resolv.conf -# ipactl restart -# Long term solution: -# Disable the re-generation of resolv.conf by /usr/sbin/dhclient-script - -runner - cwd: '/nikita/packages/ipa' - container: 'nikita-ipa' - logdir: path.resolve __dirname, './logs' - cluster: - # FreeIPA do a reverse lookup on initialisation - # Using the default bridge yields to the error - # `The host name freeipa.nikita does not match the value freeipa.lxd obtained by reverse lookup on IP address fd42:f662:97ea:ba7f:216:3eff:fe1d:96f2%215` - networks: - nktipapub: - 'ipv4.address': '10.10.11.1/24' - 'ipv4.nat': true - 'ipv6.address': 'none' - 'dns.domain': 'nikita.local' - containers: - 'nikita-ipa': - image: 'images:almalinux/8' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/ipa/env/ipa/test.coffee' - 'raw.idmap': if process.env['NIKITA_LXD_IN_VAGRANT'] - then 'both 1000 0' - else "both #{process.getuid()} 0" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - nic: - eth0: - name: 'eth0', nictype: 'bridged', parent: 'nktipapub' - 'ipv4.address': '10.10.11.2' - proxy: - ssh: listen: 'tcp:0.0.0.0:2200', connect: 'tcp:127.0.0.1:22' - ipa_ui_http: listen: 'tcp:0.0.0.0:2080', connect: 'tcp:127.0.0.1:80' - ipa_ui_https: listen: 'tcp:0.0.0.0:2443', connect: 'tcp:127.0.0.1:443' - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Node.js' - code: [0, 42] - command: ''' - dnf install -y tar # Not present on almalinux - bash -l -c "command -v node" && exit 42 - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 16 - ''' - container: config.container - trap: true - await @lxc.exec - $header: 'SSH keys' - code: [0, 42] - command: """ - grep "`cat /root/.ssh/id_rsa.pub`" /root/.ssh/authorized_keys && exit 42 - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! 
-f /root/.ssh/id_rsa ]; then - ssh-keygen -t rsa -f /root/.ssh/id_rsa -N '' - cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys - fi - """ - container: config.container - trap: true - await @lxc.exec - $header: 'Install FreeIPA' - code: [0, 42] - # Other possibilities to check ipa status: - # echo > /dev/tcp/localhost/443 - # echo admin_pw | kinit admin - command: """ - [ -f /etc/ipa/default.conf ] && exit 42 - # Enable the IDM – Identity Management system module, - # required on AlmaLinux - dnf install -y @idm:DL1 - dnf install -y freeipa-server ipa-server-dns - hostnamectl set-hostname ipa.nikita.local --static - #{[ - 'ipa-server-install', '-U' - # Basic options - "-a admin_pw" - "-p manager_pw" - # The container is named `nikita-ipa` and it is attached to a network - # with the `nikita.local` DNS domain. Thus, the default FQDN is - # `nikita-ipa.nikita.local` and you can do a reverse DNS lookup with - # `dig -x`. - "--hostname ipa.nikita.local" - "--domain nikita.local" - # We can set a different FQDN like `ipa.nikita.local` with `hostnamectl - # set-hostname {fqdn} --static`. However, FreeIPA will complain when it - # starts because the reverse DNS lookup check fail to match the FQDN. A - # possible solution is to have FreeIPA managing the DNS with - # `--setup-dns`. - "--setup-dns --auto-reverse --auto-forwarders" - # Kerberos REALM - "-r NIKITA.LOCAL" - # Chrony doesnt start inside a container, no permission to change clock - # Fatal error : adjtimex(0x8001) failed : Operation not permitted - # See https://bugs.launchpad.net/ubuntu/+source/chrony/+bug/1589780 - "--no-ntp" - ].join ' '} - """ - container: config.container - # ipa-server-install --uninstall - # ipa-server-install -U -a admin_pw -p manager_pw --hostname ipa.nikita.local --domain nikita.local --auto-reverse --setup-dns --auto-forwarders -r NIKITA.LOCAL - await @lxc.exec - $header: 'Immutable DNS' - code: [0, 42] - command: ''' - cat /etc/sysconfig/network-scripts/ifcfg-eth0 | egrep '^PEERDNS=no' && exit 42 - echo 'PEERDNS=no' >> /etc/sysconfig/network-scripts/ifcfg-eth0 - ''' - container: config.container - trap: true -.catch (err) -> - console.error err diff --git a/packages/ipa/env/ipa/index.js b/packages/ipa/env/ipa/index.js new file mode 100644 index 000000000..9a59dd106 --- /dev/null +++ b/packages/ipa/env/ipa/index.js @@ -0,0 +1,174 @@ + +import path from 'node:path'; +import dedent from 'dedent'; +import runner from '@nikitajs/lxd-runner'; +const __dirname = new URL( '.', import.meta.url).pathname; + +// Note: + +// Jan 20th, 2020: upgrading ubuntu to 19.10 on the host vm +// lead to an error while installing freeipa +// complaining that it cannot write into /tmp +// solution involve to run on the host machine +// check: `cat /proc/sys/fs/protected_regular` +// Temporary: `echo '0' > /proc/sys/fs/protected_regular && sysctl -p` +// Permanently: `echo 'fs.protected_regular = 0' >> /etc/sysctl.conf && sysctl -p` + +// Error starting IPA +// command: +// ipactl start +// Starting Directory Service +// Failed to start Directory Service: Command '/bin/systemctl start dirsrv@NIKITA-LOCAL.service' returned non-zero exit status 1 +// Solution: +// Check available space on host machine + +// When adding principal, got Error +// Host 'ipa.nikita.local' does not have corresponding DNS A/AAAA record +// Short term solution: +// Reset the DNS server in resolv.conf with the IPA DNS +// echo 'search nikita.local' > /etc/resolv.conf +// echo 'nameserver 10.10.11.2' >> /etc/resolv.conf +// ipactl restart +// Long term 
solution: +// Disable the re-generation of resolv.conf by /usr/sbin/dhclient-script +runner({ + cwd: '/nikita/packages/ipa', + container: 'nikita-ipa', + logdir: path.resolve(__dirname, './logs'), + cluster: { + // FreeIPA do a reverse lookup on initialisation + // Using the default bridge yields to the error "The host name + // freeipa.nikita does not match the value freeipa.lxd obtained by + // reverse lookup on IP address fd42:f662:97ea:ba7f:216:3eff:fe1d:96f2%215" + networks: { + nktipapub: { + 'ipv4.address': '10.10.11.1/24', + 'ipv4.nat': true, + 'ipv6.address': 'none', + 'dns.domain': 'nikita.local' + } + }, + containers: { + 'nikita-ipa': { + image: 'images:almalinux/8', + properties: { + 'environment.NIKITA_TEST_MODULE': '/nikita/packages/ipa/env/ipa/test.coffee', + 'raw.idmap': process.env['NIKITA_LXD_IN_VAGRANT'] ? 'both 1000 0' : `both ${process.getuid()} 0` + }, + disk: { + nikitadir: { + path: '/nikita', + source: process.env['NIKITA_HOME'] || path.join(__dirname, '../../../../') + } + }, + nic: { + eth0: { + name: 'eth0', + nictype: 'bridged', + parent: 'nktipapub', + 'ipv4.address': '10.10.11.2' + } + }, + proxy: { + ssh: { + listen: 'tcp:0.0.0.0:2200', + connect: 'tcp:127.0.0.1:22' + }, + ipa_ui_http: { + listen: 'tcp:0.0.0.0:2080', + connect: 'tcp:127.0.0.1:80' + }, + ipa_ui_https: { + listen: 'tcp:0.0.0.0:2443', + connect: 'tcp:127.0.0.1:443' + } + }, + ssh: { + enabled: true + } + } + }, + provision_container: async function({config}) { + await this.lxc.exec({ + $header: 'Node.js', + code: [0, 42], + command: dedent` + dnf install -y tar # Not present on almalinux + bash -l -c "command -v node" && exit 42 + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . ~/.bashrc + nvm install 16 + `, + container: config.container, + trap: true + }); + await this.lxc.exec({ + $header: 'SSH keys', + code: [0, 42], + command: dedent` + grep "\`cat /root/.ssh/id_rsa.pub\`" /root/.ssh/authorized_keys && exit 42 + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! -f /root/.ssh/id_rsa ]; then + ssh-keygen -t rsa -f /root/.ssh/id_rsa -N '' + cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys + fi + `, + container: config.container, + trap: true + }); + await this.lxc.exec({ + $header: 'Install FreeIPA', + code: [0, 42], + // Other possibilities to check ipa status: + // echo > /dev/tcp/localhost/443 + // echo admin_pw | kinit admin + command: dedent` + [ -f /etc/ipa/default.conf ] && exit 42 + # Enable the IDM – Identity Management system module, + # required on AlmaLinux + dnf install -y @idm:DL1 + dnf install -y freeipa-server ipa-server-dns + hostnamectl set-hostname ipa.nikita.local --static + ${[ + 'ipa-server-install', + '-U', + // Basic options + "-a admin_pw", + "-p manager_pw", + // The container is named `nikita-ipa` and it is attached to a network + // with the `nikita.local` DNS domain. Thus, the default FQDN is + // `nikita-ipa.nikita.local` and you can do a reverse DNS lookup with + // `dig -x`. + "--hostname ipa.nikita.local", + "--domain nikita.local", + // We can set a different FQDN like `ipa.nikita.local` with `hostnamectl + // set-hostname {fqdn} --static`. However, FreeIPA will complain when it + // starts because the reverse DNS lookup check fail to match the FQDN. A + // possible solution is to have FreeIPA managing the DNS with + // `--setup-dns`. 
+ "--setup-dns --auto-reverse --auto-forwarders", + // Kerberos REALM + "-r NIKITA.LOCAL", + // Chrony doesnt start inside a container, no permission to change clock + // Fatal error : adjtimex(0x8001) failed : Operation not permitted + // See https://bugs.launchpad.net/ubuntu/+source/chrony/+bug/1589780 + "--no-ntp" + ].join(' ')} + `, + container: config.container + }); + // ipa-server-install --uninstall + // ipa-server-install -U -a admin_pw -p manager_pw --hostname ipa.nikita.local --domain nikita.local --auto-reverse --setup-dns --auto-forwarders -r NIKITA.LOCAL + await this.lxc.exec({ + $header: 'Immutable DNS', + code: [0, 42], + command: dedent` + cat /etc/sysconfig/network-scripts/ifcfg-eth0 | egrep '^PEERDNS=no' && exit 42 + echo 'PEERDNS=no' >> /etc/sysconfig/network-scripts/ifcfg-eth0 + `, + container: config.container, + trap: true + }); + } + } +}); diff --git a/packages/ipa/env/ipa/test.coffee b/packages/ipa/env/ipa/test.coffee index dcc872768..1d73564b6 100644 --- a/packages/ipa/env/ipa/test.coffee +++ b/packages/ipa/env/ipa/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: ipa: true ipa: diff --git a/packages/ipa/env/run.sh b/packages/ipa/env/run.sh index 4c048cd0c..e3f775e21 100755 --- a/packages/ipa/env/run.sh +++ b/packages/ipa/env/run.sh @@ -3,4 +3,4 @@ set -e cd `pwd`/`dirname ${BASH_SOURCE}` -npx coffee ./env/ipa/index.coffee run +node ./ipa/index.js run diff --git a/packages/ipa/lib/group/add_member/index.js b/packages/ipa/lib/group/add_member/index.js index a9e52f185..d42313915 100644 --- a/packages/ipa/lib/group/add_member/index.js +++ b/packages/ipa/lib/group/add_member/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {data} = await this.network.http(config.connection, { diff --git a/packages/ipa/lib/group/add_member/schema.json b/packages/ipa/lib/group/add_member/schema.json index 2bdcf6b15..a8fe4a21c 100644 --- a/packages/ipa/lib/group/add_member/schema.json +++ b/packages/ipa/lib/group/add_member/schema.json @@ -22,7 +22,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/group/del/index.js b/packages/ipa/lib/group/del/index.js index 2c7465b7c..f341b16bb 100644 --- a/packages/ipa/lib/group/del/index.js +++ b/packages/ipa/lib/group/del/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {$status: exists} = await this.ipa.group.exists({ diff --git a/packages/ipa/lib/group/del/schema.json b/packages/ipa/lib/group/del/schema.json index b4806e827..85c4178b7 100644 --- a/packages/ipa/lib/group/del/schema.json +++ b/packages/ipa/lib/group/del/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git 
a/packages/ipa/lib/group/exists/index.js b/packages/ipa/lib/group/exists/index.js index d7bfbc5f4..37f95cfd4 100644 --- a/packages/ipa/lib/group/exists/index.js +++ b/packages/ipa/lib/group/exists/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; try { diff --git a/packages/ipa/lib/group/exists/schema.json b/packages/ipa/lib/group/exists/schema.json index ee6587f90..5613c2b0e 100644 --- a/packages/ipa/lib/group/exists/schema.json +++ b/packages/ipa/lib/group/exists/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/group/index.js b/packages/ipa/lib/group/index.js index 21037989c..cd8d8c8ca 100644 --- a/packages/ipa/lib/group/index.js +++ b/packages/ipa/lib/group/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {exists} = await this.ipa.group.exists({ diff --git a/packages/ipa/lib/group/schema.json b/packages/ipa/lib/group/schema.json index ed548e756..5c2bd91b7 100644 --- a/packages/ipa/lib/group/schema.json +++ b/packages/ipa/lib/group/schema.json @@ -13,7 +13,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/group/show/index.js b/packages/ipa/lib/group/show/index.js index aef14263c..61571f385 100644 --- a/packages/ipa/lib/group/show/index.js +++ b/packages/ipa/lib/group/show/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {data} = await this.network.http(config.connection, { diff --git a/packages/ipa/lib/group/show/schema.json b/packages/ipa/lib/group/show/schema.json index 3fdc0a253..656ca80c9 100644 --- a/packages/ipa/lib/group/show/schema.json +++ b/packages/ipa/lib/group/show/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/register.js b/packages/ipa/lib/register.js index 8ee9e8e80..dfadced2e 100644 --- a/packages/ipa/lib/register.js +++ b/packages/ipa/lib/register.js @@ -1,42 +1,35 @@ // Dependencies -require('@nikitajs/network/lib/register'); -const registry = require('@nikitajs/core/lib/registry'); +import '@nikitajs/network/register'; +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { ipa: { group: { - '': '@nikitajs/ipa/lib/group', - add_member: 
'@nikitajs/ipa/lib/group/add_member', - del: '@nikitajs/ipa/lib/group/del', - exists: '@nikitajs/ipa/lib/group/exists', - show: '@nikitajs/ipa/lib/group/show' + '': '@nikitajs/ipa/group', + add_member: '@nikitajs/ipa/group/add_member', + del: '@nikitajs/ipa/group/del', + exists: '@nikitajs/ipa/group/exists', + show: '@nikitajs/ipa/group/show' }, user: { - '': '@nikitajs/ipa/lib/user', - disable: '@nikitajs/ipa/lib/user/disable', - del: '@nikitajs/ipa/lib/user/del', - enable: '@nikitajs/ipa/lib/user/enable', - exists: '@nikitajs/ipa/lib/user/exists', - find: '@nikitajs/ipa/lib/user/find', - show: '@nikitajs/ipa/lib/user/show', - status: '@nikitajs/ipa/lib/user/status' + '': '@nikitajs/ipa/user', + disable: '@nikitajs/ipa/user/disable', + del: '@nikitajs/ipa/user/del', + enable: '@nikitajs/ipa/user/enable', + exists: '@nikitajs/ipa/user/exists', + find: '@nikitajs/ipa/user/find', + show: '@nikitajs/ipa/user/show', + status: '@nikitajs/ipa/user/status' }, service: { - '': '@nikitajs/ipa/lib/service', - del: '@nikitajs/ipa/lib/service/del', - exists: '@nikitajs/ipa/lib/service/exists', - show: '@nikitajs/ipa/lib/service/show' + '': '@nikitajs/ipa/service', + del: '@nikitajs/ipa/service/del', + exists: '@nikitajs/ipa/service/exists', + show: '@nikitajs/ipa/service/show' } } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/ipa/lib/service/del/index.js b/packages/ipa/lib/service/del/index.js index a2eb02da5..34eba0bfb 100644 --- a/packages/ipa/lib/service/del/index.js +++ b/packages/ipa/lib/service/del/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {$status: exists} = await this.ipa.service.exists({ diff --git a/packages/ipa/lib/service/del/schema.json b/packages/ipa/lib/service/del/schema.json index f358aae93..ca623896c 100644 --- a/packages/ipa/lib/service/del/schema.json +++ b/packages/ipa/lib/service/del/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/service/exists/index.js b/packages/ipa/lib/service/exists/index.js index 3248ad7e6..401af6005 100644 --- a/packages/ipa/lib/service/exists/index.js +++ b/packages/ipa/lib/service/exists/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; try { diff --git a/packages/ipa/lib/service/exists/schema.json b/packages/ipa/lib/service/exists/schema.json index cac27750b..ed93647ac 100644 --- a/packages/ipa/lib/service/exists/schema.json +++ b/packages/ipa/lib/service/exists/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", 
"password" diff --git a/packages/ipa/lib/service/index.js b/packages/ipa/lib/service/index.js index f06abf04a..d825d1c97 100644 --- a/packages/ipa/lib/service/index.js +++ b/packages/ipa/lib/service/index.js @@ -1,39 +1,33 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; -// ## Schema definitions -var handler; - -// ## Handler -handler = async function({config}) { - config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; - const {data} = await this.network.http(config.connection, { - negotiate: true, - method: 'POST', - data: { - method: "service_add/1", - params: [[config.principal], {}], - id: 0 - } - }); - let status = true; - if (data.error !== null) { - if (data.error.code !== 4002) { // principal alredy exists - const error = Error(data.error.message); - error.code = data.error.code; - throw error; +// Action +export default { + handler: async function({config}) { + config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; + const {data} = await this.network.http(config.connection, { + negotiate: true, + method: 'POST', + data: { + method: "service_add/1", + params: [[config.principal], {}], + id: 0 + } + }); + let status = true; + if (data.error !== null) { + if (data.error.code !== 4002) { // principal alredy exists + const error = Error(data.error.message); + error.code = data.error.code; + throw error; + } + status = false; } - status = false; - } - return { - $status: status - }; -}; - -// ## Exports -module.exports = { - handler: handler, + return { + $status: status + }; + }, metadata: { definitions: definitions } diff --git a/packages/ipa/lib/service/schema.json b/packages/ipa/lib/service/schema.json index f7779bb39..d356a7612 100644 --- a/packages/ipa/lib/service/schema.json +++ b/packages/ipa/lib/service/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/service/show/index.js b/packages/ipa/lib/service/show/index.js index 58d204bb2..2a44e980c 100644 --- a/packages/ipa/lib/service/show/index.js +++ b/packages/ipa/lib/service/show/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {data} = await this.network.http(config.connection, { diff --git a/packages/ipa/lib/service/show/schema.json b/packages/ipa/lib/service/show/schema.json index 67a647c5c..8d16a799d 100644 --- a/packages/ipa/lib/service/show/schema.json +++ b/packages/ipa/lib/service/show/schema.json @@ -8,7 +8,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/del/index.js b/packages/ipa/lib/user/del/index.js index 367ece904..f36e6d724 100644 --- a/packages/ipa/lib/user/del/index.js +++ b/packages/ipa/lib/user/del/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" 
}; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {$status} = await this.ipa.user.exists({ diff --git a/packages/ipa/lib/user/del/schema.json b/packages/ipa/lib/user/del/schema.json index db66f091f..0fcf79d92 100644 --- a/packages/ipa/lib/user/del/schema.json +++ b/packages/ipa/lib/user/del/schema.json @@ -12,7 +12,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/disable/index.js b/packages/ipa/lib/user/disable/index.js index 5806cf9df..4fbe2424d 100644 --- a/packages/ipa/lib/user/disable/index.js +++ b/packages/ipa/lib/user/disable/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const { diff --git a/packages/ipa/lib/user/disable/schema.json b/packages/ipa/lib/user/disable/schema.json index db66f091f..0fcf79d92 100644 --- a/packages/ipa/lib/user/disable/schema.json +++ b/packages/ipa/lib/user/disable/schema.json @@ -12,7 +12,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/enable/index.js b/packages/ipa/lib/user/enable/index.js index 946a4186b..49cf0eb46 100644 --- a/packages/ipa/lib/user/enable/index.js +++ b/packages/ipa/lib/user/enable/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const { diff --git a/packages/ipa/lib/user/enable/schema.json b/packages/ipa/lib/user/enable/schema.json index db66f091f..0fcf79d92 100644 --- a/packages/ipa/lib/user/enable/schema.json +++ b/packages/ipa/lib/user/enable/schema.json @@ -12,7 +12,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/exists/index.js b/packages/ipa/lib/user/exists/index.js index b893479e4..5e750e6de 100644 --- a/packages/ipa/lib/user/exists/index.js +++ b/packages/ipa/lib/user/exists/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; try { diff --git a/packages/ipa/lib/user/exists/schema.json b/packages/ipa/lib/user/exists/schema.json index ac9e4aba3..0514b6a1e 100644 --- a/packages/ipa/lib/user/exists/schema.json +++ b/packages/ipa/lib/user/exists/schema.json @@ -12,7 +12,7 @@ }, "connection": { "type": "object", - "$ref": 
"module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/find/index.js b/packages/ipa/lib/user/find/index.js index a5a133cd6..50e763e5c 100644 --- a/packages/ipa/lib/user/find/index.js +++ b/packages/ipa/lib/user/find/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {data} = await this.network.http(config.connection, { diff --git a/packages/ipa/lib/user/find/schema.json b/packages/ipa/lib/user/find/schema.json index 102f94a0f..c97c8a364 100644 --- a/packages/ipa/lib/user/find/schema.json +++ b/packages/ipa/lib/user/find/schema.json @@ -4,7 +4,7 @@ "properties": { "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/index.js b/packages/ipa/lib/user/index.js index 8097da83b..dd9064632 100644 --- a/packages/ipa/lib/user/index.js +++ b/packages/ipa/lib/user/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {exists} = await this.ipa.user.exists({ diff --git a/packages/ipa/lib/user/schema.json b/packages/ipa/lib/user/schema.json index dd4ea8656..961ad57f1 100644 --- a/packages/ipa/lib/user/schema.json +++ b/packages/ipa/lib/user/schema.json @@ -39,7 +39,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/show/index.js b/packages/ipa/lib/user/show/index.js index 9b5c7d68a..57f8d1745 100644 --- a/packages/ipa/lib/user/show/index.js +++ b/packages/ipa/lib/user/show/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; const {data} = await this.network.http(config.connection, { diff --git a/packages/ipa/lib/user/show/schema.json b/packages/ipa/lib/user/show/schema.json index 8284283cd..f4cdd13e6 100644 --- a/packages/ipa/lib/user/show/schema.json +++ b/packages/ipa/lib/user/show/schema.json @@ -12,7 +12,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/lib/user/status/index.js b/packages/ipa/lib/user/status/index.js index 751b8ece9..2d299cc44 100644 --- a/packages/ipa/lib/user/status/index.js +++ b/packages/ipa/lib/user/status/index.js @@ -1,15 +1,12 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from 
"./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { - var base, data, error; - if ((base = config.connection.http_headers)['Referer'] == null) { - base['Referer'] = config.connection.referer || config.connection.url; - } - ({data} = (await this.network.http(config.connection, { + config.connection.http_headers['Referer'] ??= config.connection.referer || config.connection.url; + const {data} = await this.network.http(config.connection, { negotiate: true, method: 'POST', data: { @@ -17,9 +14,9 @@ module.exports = { params: [[config.uid], {}], id: 0 } - }))); + }); if (data.error) { - error = Error(data.error.message); + const error = Error(data.error.message); error.code = data.error.code; throw error; } else { diff --git a/packages/ipa/lib/user/status/schema.json b/packages/ipa/lib/user/status/schema.json index 8284283cd..f4cdd13e6 100644 --- a/packages/ipa/lib/user/status/schema.json +++ b/packages/ipa/lib/user/status/schema.json @@ -12,7 +12,7 @@ }, "connection": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "required": [ "principal", "password" diff --git a/packages/ipa/package.json b/packages/ipa/package.json index a22fc4bae..8a443a901 100644 --- a/packages/ipa/package.json +++ b/packages/ipa/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/ipa", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various FreeIPA operations.", "keywords": [ "nikita", @@ -10,7 +11,6 @@ "tls", "ssl" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -41,9 +41,6 @@ "dependencies": { "object-diff": "^0.0.4" }, - "peerDependencies": { - "@nikitajs/core": "^1.0.0-alpha.1" - }, "devDependencies": { "@nikitajs/lxd-runner": "^1.0.0-alpha.0", "coffeescript": "^2.7.0", @@ -55,26 +52,33 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./*": "./lib/*/index.js" + }, + "homepage": "https://nikita.js.org/", "files": [ "/lib" ], + "license": "MIT", "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/ipa/lib/register" - ], "inline-diffs": true, - "timeout": 20000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/ipa/register", + "should" + ], + "throw-deprecation": true, + "timeout": 20000 + }, + "peerDependencies": { + "@nikitajs/core": "^1.0.0-alpha.1" }, "publishConfig": { "access": "public" }, - "homepage": "https://nikita.js.org/", - "license": "MIT", "repository": { "type": "git", "url": "https://github.com/adaltas/node-nikita", @@ -84,5 +88,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/ipa/test.sample.coffee b/packages/ipa/test.sample.coffee index d8a4c3827..7162d9da4 100644 --- a/packages/ipa/test.sample.coffee +++ b/packages/ipa/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: ipa: false config: [ @@ -12,5 +12,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git 
a/packages/ipa/test/group/add_member.coffee b/packages/ipa/test/group/add_member.coffee index 138d803b2..d6a2f1ec7 100644 --- a/packages/ipa/test/group/add_member.coffee +++ b/packages/ipa/test/group/add_member.coffee @@ -1,37 +1,38 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.ipa describe 'ipa.group.add_member', -> + return unless test.tags.ipa they 'add_member to a group', ({ssh}) -> gidnumber = null nikita $ssh: ssh , -> - await @ipa.group.del connection: ipa, [ + await @ipa.group.del connection: test.ipa, [ cn: 'group_add_member' , cn: 'group_add_member_user' ] - await @ipa.user.del connection: ipa, + await @ipa.user.del connection: test.ipa, uid: 'group_add_member_user' - {result} = await @ipa.group connection: ipa, + {result} = await @ipa.group connection: test.ipa, cn: 'group_add_member' gidnumber = result.gidnumber - await @ipa.user connection: ipa, + await @ipa.user connection: test.ipa, uid: 'group_add_member_user' attributes: givenname: 'Firstname' sn: 'Lastname' mail: [ 'user@nikita.js.org' ] - {$status} = await @ipa.group.add_member connection: ipa, + {$status} = await @ipa.group.add_member connection: test.ipa, cn: 'group_add_member' attributes: user: ['group_add_member_user'] $status.should.be.true() - {result} = await @ipa.group.show connection: ipa, + {result} = await @ipa.group.show connection: test.ipa, cn: 'group_add_member' result.gidnumber.should.eql gidnumber diff --git a/packages/ipa/test/group/del.coffee b/packages/ipa/test/group/del.coffee index a92e2ee37..6e3d8b158 100644 --- a/packages/ipa/test/group/del.coffee +++ b/packages/ipa/test/group/del.coffee @@ -1,21 +1,21 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.group.del', -> + return unless test.tags.ipa they 'delete a group', ({ssh}) -> nikita $ssh: ssh , -> - @ipa.group connection: ipa, + @ipa.group connection: test.ipa, cn: 'group_del' - {$status} = await @ipa.group.del connection: ipa, + {$status} = await @ipa.group.del connection: test.ipa, cn: 'group_del' $status.should.be.true() - {$status} = await @ipa.group.del connection: ipa, + {$status} = await @ipa.group.del connection: test.ipa, cn: 'group_del' $status.should.be.false() diff --git a/packages/ipa/test/group/exists.coffee b/packages/ipa/test/group/exists.coffee index f41d38945..7a2d9a243 100644 --- a/packages/ipa/test/group/exists.coffee +++ b/packages/ipa/test/group/exists.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.group.exists', -> + return unless test.tags.ipa they 'group doesnt exist', ({ssh}) -> nikita $ssh: ssh , -> - @ipa.group.del connection: ipa, + @ipa.group.del connection: test.ipa, cn: 'group_exists' - {$status, exists} = await @ipa.group.exists connection: ipa, + {$status, exists} = await @ipa.group.exists connection: test.ipa, cn: 'group_exists' $status.should.be.false() 
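The reason every specifier in this package drops the internal `lib/` segment — the registry paths, the mocha `require` entry, and the `module://…` schema `$ref`s (which presumably resolve through a similar map in `@nikitajs/network`) — is the `exports` map added to `packages/ipa/package.json` together with `"type": "module"`. A small sketch of how the bare specifiers resolve under that map, assuming Node's standard `exports` resolution (not part of the patch):

```js
// package.json (excerpt shown in the diff above):
//   "type": "module",
//   "exports": { "./register": "./lib/register.js", "./*": "./lib/*/index.js" }

// Side-effect import, resolved by the "./register" entry -> lib/register.js
import "@nikitajs/ipa/register";

// Wildcard entry "./*" -> lib/<path>/index.js
const groupExists = await import("@nikitajs/ipa/group/exists"); // lib/group/exists/index.js
console.info(typeof groupExists.default.handler); // "function"
```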
exists.should.be.false() @@ -22,9 +22,9 @@ describe 'ipa.group.exists', -> nikita $ssh: ssh , -> - @ipa.group connection: ipa, + @ipa.group connection: test.ipa, cn: 'group_exists' - {$status, exists} = await @ipa.group.exists connection: ipa, + {$status, exists} = await @ipa.group.exists connection: test.ipa, cn: 'admins' $status.should.be.true() exists.should.be.true() diff --git a/packages/ipa/test/group/index.coffee b/packages/ipa/test/group/index.coffee index 6de62d71d..357e98c16 100644 --- a/packages/ipa/test/group/index.coffee +++ b/packages/ipa/test/group/index.coffee @@ -1,23 +1,23 @@ -{merge} = require 'mixme' -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import {merge} from 'mixme' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.group', -> + return unless test.tags.ipa they 'create a group', ({ssh}) -> nikita $ssh: ssh , -> - @ipa.group.del connection: ipa, + @ipa.group.del connection: test.ipa, cn: 'group_add' - {$status} = await @ipa.group connection: ipa, + {$status} = await @ipa.group connection: test.ipa, cn: 'group_add' $status.should.be.true() - {$status} = await @ipa.group connection: ipa, + {$status} = await @ipa.group connection: test.ipa, cn: 'group_add' $status.should.be.false() @@ -25,12 +25,12 @@ describe 'ipa.group', -> nikita $ssh: ssh , -> - @ipa.group.del connection: ipa, + @ipa.group.del connection: test.ipa, cn: 'group_add' - {$status} = await @ipa.group connection: ipa, + {$status} = await @ipa.group connection: test.ipa, cn: 'group_add' $status.should.be.true() - {$status} = await @ipa.group connection: ipa, + {$status} = await @ipa.group connection: test.ipa, cn: 'group_add' attributes: description: 'group_add description' @@ -40,9 +40,9 @@ describe 'ipa.group', -> nikita $ssh: ssh , -> - @ipa.group.del connection: ipa, + @ipa.group.del connection: test.ipa, cn: 'group_add' - {$status, result} = await @ipa.group connection: ipa, + {$status, result} = await @ipa.group connection: test.ipa, cn: 'group_add' $status.should.be.true() result.gidnumber.length.should.eql 1 @@ -66,11 +66,11 @@ describe 'ipa.group', -> nikita $ssh: ssh , -> - @ipa.group.del connection: ipa, + @ipa.group.del connection: test.ipa, cn: 'group_add' - @ipa.group connection: ipa, + @ipa.group connection: test.ipa, cn: 'group_add' - {$status, result} = await @ipa.group connection: ipa, + {$status, result} = await @ipa.group connection: test.ipa, cn: 'group_add' $status.should.be.false() result.gidnumber.length.should.eql 1 diff --git a/packages/ipa/test/group/show.coffee b/packages/ipa/test/group/show.coffee index 10944be37..6fa91fc28 100644 --- a/packages/ipa/test/group/show.coffee +++ b/packages/ipa/test/group/show.coffee @@ -1,17 +1,17 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.group.show', -> + return unless test.tags.ipa they 'get single group', ({ssh}) -> nikita $ssh: ssh , -> - {result} = await @ipa.group.show connection: ipa, + {result} = await @ipa.group.show connection: test.ipa, cn: 'admins' result.gidnumber[0].should.match /\d+/ result.gidnumber[0] = '0000000000' @@ -26,7 +26,7 @@ describe 'ipa.group.show', -> nikita $ssh: 
ssh , -> - @ipa.group.show connection: ipa, + @ipa.group.show connection: test.ipa, cn: 'missing' .should.be.rejectedWith code: 4001 diff --git a/packages/ipa/test/service/del.coffee b/packages/ipa/test/service/del.coffee index 7b4d6acac..5bd0a1f07 100644 --- a/packages/ipa/test/service/del.coffee +++ b/packages/ipa/test/service/del.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.service.del', -> + return unless test.tags.ipa they 'delete a missing service', ({ssh}) -> nikita $ssh: ssh , -> - await @ipa.service.del connection: ipa, + await @ipa.service.del connection: test.ipa, principal: 'test_service_del' - {$status} = await @ipa.service.del connection: ipa, + {$status} = await @ipa.service.del connection: test.ipa, principal: 'test_service_del' $status.should.be.false() @@ -21,8 +21,8 @@ describe 'ipa.service.del', -> nikita $ssh: ssh , -> - await @ipa.service connection: ipa, + await @ipa.service connection: test.ipa, principal: 'test_service_del/ipa.nikita.local' - {$status} = await @ipa.service.del connection: ipa, + {$status} = await @ipa.service.del connection: test.ipa, principal: 'test_service_del/ipa.nikita.local' $status.should.be.true() diff --git a/packages/ipa/test/service/exists.coffee b/packages/ipa/test/service/exists.coffee index 207e1d061..da5ce06b6 100644 --- a/packages/ipa/test/service/exists.coffee +++ b/packages/ipa/test/service/exists.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.service.exists', -> + return unless test.tags.ipa they 'service doesnt exist', ({ssh}) -> nikita $ssh: ssh , -> - @ipa.service.del connection: ipa, + @ipa.service.del connection: test.ipa, principal: 'service_exists/ipa.nikita.local' - {$status, exists} = await @ipa.service.exists connection: ipa, + {$status, exists} = await @ipa.service.exists connection: test.ipa, principal: 'service_exists/ipa.nikita.local' $status.should.be.false() exists.should.be.false() @@ -22,11 +22,11 @@ describe 'ipa.service.exists', -> nikita $ssh: ssh , -> - @ipa.service connection: ipa, + @ipa.service connection: test.ipa, principal: 'service_exists/ipa.nikita.local' - {$status, exists} = await @ipa.service.exists connection: ipa, + {$status, exists} = await @ipa.service.exists connection: test.ipa, principal: 'service_exists/ipa.nikita.local' $status.should.be.true() exists.should.be.true() - @ipa.service.del connection: ipa, + @ipa.service.del connection: test.ipa, principal: 'service_exists/ipa.nikita.local' diff --git a/packages/ipa/test/service/index.coffee b/packages/ipa/test/service/index.coffee index 5e14cab13..f35817b83 100644 --- a/packages/ipa/test/service/index.coffee +++ b/packages/ipa/test/service/index.coffee @@ -1,12 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa - +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.service', -> + return unless 
test.tags.ipa they 'create a service', ({ssh}) -> nikita @@ -14,14 +13,14 @@ describe 'ipa.service', -> , -> @ipa.service.del principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa {$status} = await @ipa.service principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa $status.should.be.true() @ipa.service.del principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa they 'create an existing service', ({ssh}) -> nikita @@ -29,14 +28,14 @@ describe 'ipa.service', -> , -> @ipa.service.del principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa @ipa.service principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa {$status} = await @ipa.service principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa $status.should.be.false() @ipa.service.del principal: 'service_add/ipa.nikita.local', - connection: ipa + connection: test.ipa diff --git a/packages/ipa/test/service/show.coffee b/packages/ipa/test/service/show.coffee index ebacf2503..c50d77e58 100644 --- a/packages/ipa/test/service/show.coffee +++ b/packages/ipa/test/service/show.coffee @@ -1,17 +1,17 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.service.show', -> + return unless test.tags.ipa they 'get single service', ({ssh}) -> nikita $ssh: ssh , -> - {result} = await @ipa.service.show connection: ipa, + {result} = await @ipa.service.show connection: test.ipa, principal: 'HTTP/ipa.nikita.local' result.dn.should.eql 'krbprincipalname=HTTP/ipa.nikita.local@NIKITA.LOCAL,cn=services,cn=accounts,dc=nikita,dc=local' @@ -19,7 +19,7 @@ describe 'ipa.service.show', -> nikita $ssh: ssh , -> - @ipa.service.show connection: ipa, + @ipa.service.show connection: test.ipa, principal: 'missing/ipa.nikita.local' .should.be.rejectedWith code: 4001 diff --git a/packages/ipa/test/test.coffee b/packages/ipa/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/ipa/test/test.coffee +++ b/packages/ipa/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/ipa/test/user/del.coffee b/packages/ipa/test/user/del.coffee index f1d5d492f..8a12f9913 100644 --- 
a/packages/ipa/test/user/del.coffee +++ b/packages/ipa/test/user/del.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.user.del', -> + return unless test.tags.ipa describe 'schema', -> they 'use `username` as alias for `uid`', ({ssh}) -> nikita $ssh: ssh - .ipa.user.del connection: ipa, + .ipa.user.del connection: test.ipa, username: 'test_user_del' describe 'action', -> @@ -21,9 +21,9 @@ describe 'ipa.user.del', -> nikita $ssh: ssh , -> - @ipa.user.del connection: ipa, + @ipa.user.del connection: test.ipa, uid: 'test_user_del' - {$status} = await @ipa.user.del connection: ipa, + {$status} = await @ipa.user.del connection: test.ipa, uid: 'test_user_del' $status.should.be.false() @@ -31,7 +31,7 @@ describe 'ipa.user.del', -> nikita $ssh: ssh , -> - @ipa.user connection: ipa, + @ipa.user connection: test.ipa, uid: 'test_user_del' attributes: givenname: 'User' @@ -39,6 +39,6 @@ describe 'ipa.user.del', -> mail: [ 'test_user_del@nikita.js.org' ] - {$status} = await @ipa.user.del connection: ipa, + {$status} = await @ipa.user.del connection: test.ipa, uid: 'test_user_del' $status.should.be.true() diff --git a/packages/ipa/test/user/disable.coffee b/packages/ipa/test/user/disable.coffee index 5934d1a31..5d3797c8f 100644 --- a/packages/ipa/test/user/disable.coffee +++ b/packages/ipa/test/user/disable.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.user.disable', -> + return unless test.tags.ipa describe 'schema', -> they 'use `username` as alias for `uid`', ({ssh}) -> nikita $ssh: ssh - .ipa.user.disable connection: ipa, + .ipa.user.disable connection: test.ipa, username: 'test_user_disable' , ({config: {uid}}) -> uid.should.eql 'test_user_disable' @@ -23,7 +23,7 @@ describe 'ipa.user.disable', -> nikita $ssh: ssh , -> - @ipa.user.disable connection: ipa, + @ipa.user.disable connection: test.ipa, uid: 'test_user_disable_missing' .should.be.rejectedWith code: 4001 @@ -33,10 +33,10 @@ describe 'ipa.user.disable', -> nikita $ssh: ssh , -> - await @ipa.user.del connection: ipa, + await @ipa.user.del connection: test.ipa, $relax: true uid: 'test_user_disable_active' - await @ipa.user connection: ipa, + await @ipa.user connection: test.ipa, uid: 'test_user_disable_active' attributes: givenname: 'User' @@ -44,7 +44,7 @@ describe 'ipa.user.disable', -> mail: [ 'test_user_disable@nikita.js.org' ] - {$status} = await @ipa.user.disable connection: ipa, + {$status} = await @ipa.user.disable connection: test.ipa, uid: 'test_user_disable_active' $status.should.be.true() @@ -52,10 +52,10 @@ describe 'ipa.user.disable', -> nikita $ssh: ssh , -> - await @ipa.user.del connection: ipa, + await @ipa.user.del connection: test.ipa, $relax: true uid: 'test_user_disable_inactive' - await @ipa.user connection: ipa, + await @ipa.user connection: test.ipa, uid: 'test_user_disable_inactive' attributes: givenname: 'User' @@ -63,8 +63,8 @@ describe 'ipa.user.disable', -> mail: [ 'test_user_disable@nikita.js.org' ] - await @ipa.user.disable connection: ipa, + await @ipa.user.disable 
connection: test.ipa, uid: 'test_user_disable_inactive' - {$status} = await @ipa.user.disable connection: ipa, + {$status} = await @ipa.user.disable connection: test.ipa, uid: 'test_user_disable_inactive' $status.should.be.false() diff --git a/packages/ipa/test/user/enable.coffee b/packages/ipa/test/user/enable.coffee index ba3c952c6..308f27867 100644 --- a/packages/ipa/test/user/enable.coffee +++ b/packages/ipa/test/user/enable.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.user.enable', -> + return unless test.tags.ipa describe 'schema', -> they 'use `username` as alias for `uid`', ({ssh}) -> nikita $ssh: ssh - .ipa.user.enable connection: ipa, + .ipa.user.enable connection: test.ipa, username: 'test_user_enable' , ({config: {uid}}) -> uid.should.eql 'test_user_enable' @@ -23,7 +23,7 @@ describe 'ipa.user.enable', -> nikita $ssh: ssh , -> - @ipa.user.enable connection: ipa, + @ipa.user.enable connection: test.ipa, uid: 'test_user_enable_missing' .should.be.rejectedWith code: 4001 @@ -33,10 +33,10 @@ describe 'ipa.user.enable', -> nikita $ssh: ssh , -> - await @ipa.user.del connection: ipa, + await @ipa.user.del connection: test.ipa, $relax: true uid: 'test_user_enable_active' - await @ipa.user connection: ipa, + await @ipa.user connection: test.ipa, uid: 'test_user_enable_active' attributes: givenname: 'User' @@ -45,7 +45,7 @@ describe 'ipa.user.enable', -> 'test_user_enable@nikita.js.org' ] nsaccountlock: true - {$status} = await @ipa.user.enable connection: ipa, + {$status} = await @ipa.user.enable connection: test.ipa, uid: 'test_user_enable_active' $status.should.be.true() @@ -53,10 +53,10 @@ describe 'ipa.user.enable', -> nikita $ssh: ssh , -> - await @ipa.user.del connection: ipa, + await @ipa.user.del connection: test.ipa, $relax: true uid: 'test_user_enable_inactive' - await @ipa.user connection: ipa, + await @ipa.user connection: test.ipa, uid: 'test_user_enable_inactive' attributes: givenname: 'User' @@ -65,8 +65,8 @@ describe 'ipa.user.enable', -> 'test_user_enable@nikita.js.org' ] nsaccountlock: true - await @ipa.user.enable connection: ipa, + await @ipa.user.enable connection: test.ipa, uid: 'test_user_enable_inactive' - {$status} = await @ipa.user.enable connection: ipa, + {$status} = await @ipa.user.enable connection: test.ipa, uid: 'test_user_enable_inactive' $status.should.be.false() diff --git a/packages/ipa/test/user/exists.coffee b/packages/ipa/test/user/exists.coffee index 4d1c0ca45..8430e79a3 100644 --- a/packages/ipa/test/user/exists.coffee +++ b/packages/ipa/test/user/exists.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.user.exists', -> + return unless test.tags.ipa describe 'schema', -> they 'use `username` as alias for `uid`', ({ssh}) -> nikita $ssh: ssh - .ipa.user.exists connection: ipa, + .ipa.user.exists connection: test.ipa, username: 'user_exists' describe 'action', -> @@ -21,9 +21,9 @@ describe 'ipa.user.exists', -> nikita $ssh: ssh , -> - @ipa.user.del connection: ipa, + @ipa.user.del connection: test.ipa, uid: 
'user_exists' - {$status, exists} = await @ipa.user.exists connection: ipa, + {$status, exists} = await @ipa.user.exists connection: test.ipa, uid: 'user_exists' $status.should.be.false() exists.should.be.false() @@ -32,7 +32,7 @@ describe 'ipa.user.exists', -> nikita $ssh: ssh , -> - @ipa.user connection: ipa, + @ipa.user connection: test.ipa, uid: 'user_exists' attributes: givenname: 'Firstname' @@ -40,7 +40,7 @@ describe 'ipa.user.exists', -> mail: [ 'user@nikita.js.org' ] - {$status, exists} = await @ipa.user.exists connection: ipa, + {$status, exists} = await @ipa.user.exists connection: test.ipa, uid: 'user_exists' $status.should.be.true() exists.should.be.true() diff --git a/packages/ipa/test/user/find.coffee b/packages/ipa/test/user/find.coffee index e10eee8e1..d204c81c8 100644 --- a/packages/ipa/test/user/find.coffee +++ b/packages/ipa/test/user/find.coffee @@ -1,31 +1,30 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) delete_users = -> - @ipa.user.del connection: ipa, + @ipa.user.del connection: test.ipa, uid: 'user_find_1' - @ipa.user.del connection: ipa, + @ipa.user.del connection: test.ipa, uid: 'user_find_2' - @ipa.user.del connection: ipa, + @ipa.user.del connection: test.ipa, uid: 'user_find_3' create_users = -> - @ipa.user connection: ipa, + @ipa.user connection: test.ipa, uid: 'user_find_1' attributes: givenname: 'Firstname1' sn: 'Lastname1' mail: [ 'user_find_1@nikita.js.org' ] - @ipa.user connection: ipa, + @ipa.user connection: test.ipa, uid: 'user_find_2' attributes: givenname: 'Firstname2' sn: 'Lastname2' mail: [ 'user_find_2@nikita.js.org' ] - @ipa.user connection: ipa, + @ipa.user connection: test.ipa, uid: 'user_find_3' attributes: givenname: 'Firstname3' @@ -33,6 +32,7 @@ create_users = -> mail: [ 'user_find_3@nikita.js.org' ] describe 'ipa.user.find', -> + return unless test.tags.ipa they 'all users', ({ssh}) -> nikita @@ -40,7 +40,7 @@ describe 'ipa.user.find', -> , -> @call delete_users @call create_users - {result} = await @ipa.user.find connection: ipa + {result} = await @ipa.user.find connection: test.ipa result .map (user) -> user.mail?[0] .filter (mail) -> @@ -58,13 +58,13 @@ describe 'ipa.user.find', -> , -> @call delete_users @call create_users - @ipa.group connection: ipa, + @ipa.group connection: test.ipa, cn: 'user_find_group' - @ipa.group.add_member connection: ipa, + @ipa.group.add_member connection: test.ipa, cn: 'user_find_group' attributes: user: ['user_find_1', 'user_find_3'] - {result} = await @ipa.user.find connection: ipa, + {result} = await @ipa.user.find connection: test.ipa, criterias: in_group: ['user_find_group'] result diff --git a/packages/ipa/test/user/index.coffee b/packages/ipa/test/user/index.coffee index d021cfd95..199f40d61 100644 --- a/packages/ipa/test/user/index.coffee +++ b/packages/ipa/test/user/index.coffee @@ -1,9 +1,8 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) userMatch = sn: [ 'Lastname' ], @@ -21,6 +20,7 @@ userMatch = memberof_group: [ 'ipausers' ] describe 'ipa.user', -> + return unless test.tags.ipa describe 'schema', -> @@ -61,7 +61,7 @@ 
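A pattern worth noting in the converted test files: the `return unless tags.ipa` guard moves from the top of each file into the `describe` body, presumably because a top-level `return` is only legal when the module body is wrapped in a function (CommonJS); in an ES module it is a syntax error. A JavaScript sketch of the equivalent shape — the real suites are CoffeeScript and rely on the mocha loader configured in `package.json`, and the assertions here are illustrative:

```js
import nikita from "@nikitajs/core";
import mochaThey from "mocha-they";
import test from "../test.coffee"; // loaded through the CoffeeScript-aware loader
const they = mochaThey(test.config);

describe("ipa.user.exists", function () {
  // A top-level `return` would be a SyntaxError in an ES module,
  // so the tag guard now sits inside the suite callback.
  if (!test.tags.ipa) return;
  they("admin user exists", async function ({ ssh }) {
    const { exists } = await nikita({ $ssh: ssh }).ipa.user.exists({
      connection: test.ipa,
      uid: "admin",
    });
    exists.should.be.true();
  });
});
```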
describe 'ipa.user', -> givenname: 'Firstname' sn: 'Lastname' mail: 'user@nikita.js.org' - connection: ipa + connection: test.ipa describe 'action', -> @@ -71,14 +71,14 @@ describe 'ipa.user', -> , -> @ipa.user.del uid: 'user_add' - connection: ipa + connection: test.ipa {$status, result} = await @ipa.user uid: 'user_add' attributes: givenname: 'Firstname' sn: 'Lastname' mail: [ 'user@nikita.js.org' ] - connection: ipa + connection: test.ipa $status.should.be.true() result.should.match userMatch @@ -86,7 +86,7 @@ describe 'ipa.user', -> nikita $ssh: ssh , -> - @ipa.user.del connection: ipa, + @ipa.user.del connection: test.ipa, uid: 'user_add' @ipa.user uid: 'user_add' @@ -94,14 +94,14 @@ describe 'ipa.user', -> givenname: 'Firstname 1' sn: 'Lastname' mail: [ 'user@nikita.js.org' ] - connection: ipa + connection: test.ipa {$status, result} = await @ipa.user uid: 'user_add' attributes: givenname: 'Firstname 2' sn: 'Lastname' mail: [ 'user@nikita.js.org' ] - connection: ipa + connection: test.ipa $status.should.be.true() result.should.match {...userMatch, givenname: ['Firstname 2']} @@ -111,21 +111,21 @@ describe 'ipa.user', -> , -> @ipa.user.del uid: 'user_add' - connection: ipa + connection: test.ipa {$status} = await @ipa.user uid: 'user_add' attributes: givenname: 'Firstname' sn: 'Lastname' mail: [ 'user@nikita.js.org' ] - connection: ipa + connection: test.ipa {$status, result} = await @ipa.user uid: 'user_add' attributes: givenname: 'Firstname' sn: 'Lastname' mail: [ 'user@nikita.js.org' ] - connection: ipa + connection: test.ipa $status.should.be.false() result.should.match userMatch @@ -134,7 +134,7 @@ describe 'ipa.user', -> $ssh: ssh , -> @ipa.user.del - connection: ipa + connection: test.ipa uid: 'user_add' @ipa.user attributes: @@ -142,13 +142,13 @@ describe 'ipa.user', -> sn: 'Lastname' mail: [ 'user@nikita.js.org' ] userpassword: 'toto' - connection: ipa + connection: test.ipa uid: 'user_add' {$status} = await @ipa.user uid: 'user_add' attributes: userpassword: 'toto' - connection: ipa + connection: test.ipa $status.should.be.false() they 'modify password', ({ssh}) -> @@ -156,7 +156,7 @@ describe 'ipa.user', -> $ssh: ssh , -> @ipa.user.del - connection: ipa + connection: test.ipa uid: 'user_add' @ipa.user attributes: @@ -164,12 +164,12 @@ describe 'ipa.user', -> sn: 'Lastname' mail: [ 'user@nikita.js.org' ] userpassword: 'toto' - connection: ipa + connection: test.ipa uid: 'user_add' {$status} = await @ipa.user attributes: userpassword: 'toto' - connection: ipa + connection: test.ipa force_userpassword: true uid: 'user_add' $status.should.be.true() diff --git a/packages/ipa/test/user/show.coffee b/packages/ipa/test/user/show.coffee index 5c290ecbb..7e7a0823f 100644 --- a/packages/ipa/test/user/show.coffee +++ b/packages/ipa/test/user/show.coffee @@ -1,16 +1,16 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.user.show', -> + return unless test.tags.ipa describe 'schema', -> they 'use `username` as alias for `uid`', ({ssh}) -> - nikita.ipa.user.show connection: ipa, + nikita.ipa.user.show connection: test.ipa, username: 'admin' describe 'action', -> @@ -19,7 +19,7 @@ describe 'ipa.user.show', -> nikita $ssh: ssh , -> - {result} = await @ipa.user.show connection: ipa, + {result} = await @ipa.user.show connection: test.ipa, uid: 
'admin' result.dn.should.match /^uid=admin,cn=users,cn=accounts,/ @@ -27,7 +27,7 @@ describe 'ipa.user.show', -> nikita $ssh: ssh , -> - @ipa.user.show connection: ipa, + @ipa.user.show connection: test.ipa, uid: 'missing' .should.be.rejectedWith code: 4001 diff --git a/packages/ipa/test/user/status.coffee b/packages/ipa/test/user/status.coffee index 8aee5d7fb..061beed2e 100644 --- a/packages/ipa/test/user/status.coffee +++ b/packages/ipa/test/user/status.coffee @@ -1,17 +1,17 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ipa} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ipa +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ipa.user.status', -> + return unless test.tags.ipa describe 'schema', -> they 'use `username` as alias for `uid`', ({ssh}) -> nikita - .ipa.user.status connection: ipa, + .ipa.user.status connection: test.ipa, username: 'user_status' , ({config: {uid}}) -> uid.should.eql 'user_status' @@ -22,7 +22,7 @@ describe 'ipa.user.status', -> nikita $ssh: ssh , -> - {result} = await @ipa.user.status connection: ipa, + {result} = await @ipa.user.status connection: test.ipa, uid: 'admin' result.dn.should.match /^uid=admin,cn=users,cn=accounts,/ @@ -30,7 +30,7 @@ describe 'ipa.user.status', -> nikita $ssh: ssh , -> - @ipa.user.status connection: ipa, + @ipa.user.status connection: test.ipa, uid: 'missing' .should.be.rejectedWith code: 4001 diff --git a/packages/java/README.md b/packages/java/README.md index e6fc3ec00..adab821b9 100644 --- a/packages/java/README.md +++ b/packages/java/README.md @@ -2,3 +2,24 @@ # Nikita "java" package The "java" package provides Nikita actions to work with Java keystores and truststores. + +## Usage + +```js +import "@nikitajs/java/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.java.keystore.add({ + keystore: "~/path/to/keystore", + storepass: "changeit", + // Certificate authority + caname: "my_caname", + cacert: "~/path/to/certificates/cacert.pem", + // Certificate and key + cert: "~/path/to/certificates/node_1_cert.pem", + name: "my_name", + key: "~/path/to/certificates/node_1_key.pem", + keypass: "secret", +}); +console.info("Keystore was modified:", $status); +``` diff --git a/packages/java/env/openjdk9/docker-compose.yml b/packages/java/env/openjdk9/docker-compose.yml new file mode 100644 index 000000000..6db29b1c8 --- /dev/null +++ b/packages/java/env/openjdk9/docker-compose.yml @@ -0,0 +1,26 @@ + +services: + target: + build: + context: . + dockerfile: ./target/Dockerfile + image: nikita_java_openjdk9_target + container_name: nikita_java_openjdk9_target + volumes: + # Note, remove mounting point once "./test/resources" is removed + # and tmpdir is used with generated certificates. + - ../../../../:/nikita + # platform: linux/amd64 # Required on Apple M1 + nodejs: + build: + context: . 
+ dockerfile: ./nodejs/Dockerfile + image: nikita_java_openjdk9_nodejs + container_name: nikita_java_openjdk9_nodejs + # platform: linux/amd64 # Required on Apple M1 + depends_on: + - target + volumes: + - ../../../../:/nikita + environment: + NIKITA_TEST_MODULE: /nikita/packages/java/env/openjdk9/test.coffee diff --git a/packages/java/env/openjdk9/entrypoint.sh b/packages/java/env/openjdk9/entrypoint.sh new file mode 100755 index 000000000..d01ae4a9f --- /dev/null +++ b/packages/java/env/openjdk9/entrypoint.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -e + +if test -t 0; then + # We have TTY, so probably an interactive container... + if [[ $@ ]]; then + # Transfer arguments to mocha + npx mocha $@ + else + # Run bash when no argument + export PS1='[\u@\h : \w]\$ ' + /bin/bash + fi +# Detached mode +else + npm run test:local +fi diff --git a/packages/java/env/openjdk9/nodejs/Dockerfile b/packages/java/env/openjdk9/nodejs/Dockerfile new file mode 100644 index 000000000..395605811 --- /dev/null +++ b/packages/java/env/openjdk9/nodejs/Dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +ARG DEBIAN_FRONTEND=nonintercative +RUN \ + apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping + +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/java + +# User +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita +USER nikita + +# Install Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +# Note, bashrc not sourced unless running interactively +ENV PATH /home/nikita/n/bin:$PATH + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/service/env/ubuntu/run.sh b/packages/java/env/openjdk9/run.sh similarity index 100% rename from packages/service/env/ubuntu/run.sh rename to packages/java/env/openjdk9/run.sh diff --git a/packages/java/env/openjdk9/target/Dockerfile b/packages/java/env/openjdk9/target/Dockerfile new file mode 100644 index 000000000..39c46fd47 --- /dev/null +++ b/packages/java/env/openjdk9/target/Dockerfile @@ -0,0 +1,26 @@ +FROM adoptopenjdk/openjdk9:latest +LABEL org.opencontainers.image.authors="David Worms " + +RUN apt update -y && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd + +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/core + +# Sudo User +RUN useradd nikita -d /home/nikita && \ + hash=$(echo "secret" | openssl passwd -1 -stdin) && \ + usermod --pass="$hash" nikita && \ + mkdir -p /home/nikita && \ + mkdir -p /home/nikita/.ssh && \ + chown -R nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + +USER nikita + +ENTRYPOINT ["sudo", "/usr/sbin/sshd", "-D"] diff --git a/packages/java/env/openjdk9/test.coffee b/packages/java/env/openjdk9/test.coffee new file mode 100644 index 000000000..bb1d3a8e3 --- /dev/null +++ b/packages/java/env/openjdk9/test.coffee @@ -0,0 +1,17 @@ + +export default + tags: + java: true + # docker: # eg `docker-machine create --driver virtualbox nikita || docker-machine start nikita` + # host: 'dind:2375' + # # machine: 'nikita' + # service: + # name: 'ntp' + # srv_name: 'ntpd' + # chk_name: 'ntpd' + config: [ + label: 'remote' + ssh: + host: 'target', username: 'nikita', + password: 'secret' + ] diff --git a/packages/java/env/run.sh 
b/packages/java/env/run.sh new file mode 100755 index 000000000..a2885f0ee --- /dev/null +++ b/packages/java/env/run.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -e + +cd `pwd`/`dirname ${BASH_SOURCE}` + +./openjdk9/run.sh +# ./openjdk11/run.sh diff --git a/packages/java/lib/keystore_add/README.md b/packages/java/lib/keystore/add/README.md similarity index 93% rename from packages/java/lib/keystore_add/README.md rename to packages/java/lib/keystore/add/README.md index 07beeaab6..86fdd6053 100644 --- a/packages/java/lib/keystore_add/README.md +++ b/packages/java/lib/keystore/add/README.md @@ -1,5 +1,5 @@ -# `nikita.java.keystore_add` +# `nikita.java.keystore.add` Add certificates, private keys and certificate authorities to java keystores and truststores. @@ -35,7 +35,7 @@ alias value is "my-alias", the aliases will be "my-alias-0" then "my-alias-1"... ## Uploading public and private keys into a keystore ```js -const {$status} = await nikita.java.keystore_add([{ +const {$status} = await nikita.java.keystore.add([{ keystore: java_home + '/lib/security/cacerts', storepass: 'changeit', caname: 'my_ca_certificate', @@ -51,7 +51,7 @@ console.info(`Keystore was updated: ${$status}`) ## Uploading a certificate authority ```js -const {$status} = await nikita.java.keystore_add([{ +const {$status} = await nikita.java.keystore.add([{ keystore: java_home + '/lib/security/cacerts', storepass: 'changeit', caname: 'my_ca_certificate', diff --git a/packages/java/lib/keystore/add/index.js b/packages/java/lib/keystore/add/index.js new file mode 100644 index 000000000..35efabac3 --- /dev/null +++ b/packages/java/lib/keystore/add/index.js @@ -0,0 +1,217 @@ +// Dependencies +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; + +// Action +export default { + handler: async function ({ + config, + ssh, + metadata: { tmpdir }, + tools: { path }, + }) { + // Update paths in case of download + const files = { + cert: + ssh && config.local && config.cert != null + ? `${tmpdir}/${path.local.basename(config.cert)}` + : config.cert, + cacert: + ssh && config.local && config.cacert != null + ? `${tmpdir}/${path.local.basename(config.cacert)}` + : config.cacert, + key: + ssh && config.local && config.key != null + ? 
`${tmpdir}/${path.local.basename(config.key)}` + : config.key, + }; + // Temporary directory + // Used to upload certificates and to isolate certificates from their file + if (tmpdir) { + await this.fs.mkdir({ + $shy: true, + target: tmpdir, + mode: 0o0700, + }); + } + // Upload certificates + if (ssh && config.local && config.cacert) { + await this.file.download({ + $shy: true, + source: config.cacert, + target: files.cacert, + mode: 0o0600, + }); + } + if (ssh && config.local && config.cert) { + await this.file.download({ + $shy: true, + source: config.cert, + target: files.cert, + mode: 0o0600, + }); + } + if (ssh && config.local && config.key) { + await this.file.download({ + $shy: true, + source: config.key, + target: files.key, + mode: 0o0600, + }); + } + // Prepare parent directory + await this.fs.mkdir({ + parent: config.parent, + target: path.dirname(config.keystore), + }); + try { + if (!!config.cert) { + await this.execute({ + bash: true, + command: dedent` + # Detect openssl command + opensslbin=\`command -v ${config.openssl}\` || { + echo 'OpenSSL command line tool not detected'; exit 43 + } + # Detect keytool command + keytoolbin=\`command -v ${config.keytool}\` || { + if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool'; + elif [ -x /opt/java/openjdk/bin/keytool ]; then keytoolbin='/opt/java/openjdk/bin/keytool'; + else exit 44; fi + } + # keytoolbin=\`command -v ${config.keytool}\` + [ -f ${files.cert} ] || (exit 45) + user=\`$opensslbin x509 -noout -in "${files.cert}" -sha1 -fingerprint | sed 's/\\(.*\\)=\\(.*\\)/\\2/'\` + # We are only retrieving the first certificate found in the chain with \`head -n 1\` + keystore=\`$keytoolbin -list -v -keystore ${config.keystore} -storepass ${config.storepass} -alias ${config.name} | grep SHA1: | head -n 1 | sed -E 's/.+SHA1: +(.*)/\\1/'\` + echo "User Certificate: $user" + echo "Keystore Certificate: $keystore" + if [ "$user" = "$keystore" ]; then exit 5; fi + # Create a PKCS12 file that contains key and certificate + $opensslbin pkcs12 -export -in "${files.cert}" -inkey "${files.key}" -out "${tmpdir}/pkcs12" -name ${config.name} -password pass:${config.keypass} + # Import PKCS12 into keystore + $keytoolbin -noprompt -importkeystore -destkeystore ${config.keystore} -deststorepass ${config.storepass} -destkeypass ${config.keypass} -srckeystore "${tmpdir}/pkcs12" -srcstoretype PKCS12 -srcstorepass ${config.keypass} -alias ${config.name} + `, + trap: true, + code: [ + 0, + 5, // OpenSSL exit 3 if file does not exists + ], + }); + } + } catch (error) { + if (error.exit_code === 43) { + throw Error("OpenSSL command line tool not detected."); + } + if (error.exit_code === 44) { + throw utils.error("NIKITA_JAVA_KEYTOOL_NOT_FOUND", [ + "Keytool command not detected,", + `searched ${JSON.stringify(config.keytool)}`, + ', "/usr/java/default/bin/keytool"', + ', and "/opt/java/openjdk/bin/keytool."', + ]); + } + if (error.exit_code === 45) { + throw utils.error("NIKITA_JAVA_KEYSTORE_NOT_FOUND", [ + "Keystore file does not exists", + `at location ${JSON.stringify(files.cert)}.`, + ]); + } + throw error; + } + try { + // Deal with CACert + if (config.cacert) { + await this.execute({ + bash: true, + command: dedent` + # Detect keytool command + keytoolbin=\`command -v ${config.keytool}\` || { + if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool'; + elif [ -x /opt/java/openjdk/bin/keytool ]; then keytoolbin='/opt/java/openjdk/bin/keytool'; + else exit 43; fi + } + # Check 
password + if [ -f ${config.keystore} ] && ! $keytoolbin -list -keystore ${config.keystore} -storepass ${config.storepass} >/dev/null; then + exit 44 + fi + [ -f ${files.cacert} ] || (echo 'CA file doesnt not exists: ${files.cacert} 1>&2'; exit 3) + # Import CACert + PEM_FILE=${files.cacert} + CERTS=$(grep 'END CERTIFICATE' $PEM_FILE| wc -l) + code=5 + for N in $(seq 0 $(($CERTS - 1))); do + if [ $CERTS -eq '1' ]; then + ALIAS="${config.caname}" + else + ALIAS="${config.caname}-$N" + fi + # Isolate cert into a file + CACERT_FILE=${tmpdir}/$ALIAS + cat $PEM_FILE | awk "n==$N { print }; /END CERTIFICATE/ { n++ }" > $CACERT_FILE + # Read user CACert signature + user=\`${config.openssl} x509 -noout -in "$CACERT_FILE" -sha1 -fingerprint | sed 's/\\(.*\\)=\\(.*\\)/\\2/'\` + # Read registered CACert signature + keystore=\`$keytoolbin -list -v -keystore ${config.keystore} -storepass ${config.storepass} -alias $ALIAS | grep SHA1: | sed -E 's/.+SHA1: +(.*)/\\1/'\` + echo "User CA Cert: $user" + echo "Keystore CA Cert: $keystore" + if [ "$user" = "$keystore" ]; then echo 'Identical Signature'; code=5; continue; fi + # Remove CACert if signature doesnt match + if [ "$keystore" != "" ]; then + $keytoolbin -delete -keystore ${config.keystore} -storepass ${config.storepass} -alias $ALIAS + fi + $keytoolbin -noprompt -import -trustcacerts -keystore ${config.keystore} -storepass ${config.storepass} -alias $ALIAS -file ${tmpdir}/$ALIAS + code=0 + done + exit $code + `, + trap: true, + code: [0, 5], + }); + } + } catch (error) { + if (error.exit_code === 43) { + throw utils.error("NIKITA_JAVA_KEYTOOL_NOT_FOUND", [ + "Keytool command not detected,", + `searched ${JSON.stringify(config.keytool)}`, + ', "/usr/java/default/bin/keytool"', + ', and "/opt/java/openjdk/bin/keytool."', + ]); + } + if (error.exit_code === 44) { + throw utils.error("NIKITA_JAVA_KEYSTORE_INVALID_PASSWORD", [ + "Keystore password is invalid,", + "change it manually with:", + `\`keytool -storepasswd -keystore ${esa( + config.keystore + )} -storepass -new '\``, + ]); + } + if (error.exit_code === 3) { + throw Error(`CA file does not exist: ${files.cacert}`); + } + throw error; + } + // Ensure ownerships and permissions + if (config.uid != null || config.gid != null) { + await this.fs.chown({ + target: config.keystore, + uid: config.uid, + gid: config.gid, + }); + } + if (config.mode != null) { + await this.fs.chmod({ + target: config.keystore, + mode: config.mode, + }); + } + return void 0; + }, + metadata: { + tmpdir: true, + definitions: definitions, + }, +}; diff --git a/packages/java/lib/keystore_add/schema.json b/packages/java/lib/keystore/add/schema.json similarity index 89% rename from packages/java/lib/keystore_add/schema.json rename to packages/java/lib/keystore/add/schema.json index 7ec0db5a0..e27fa4e58 100644 --- a/packages/java/lib/keystore_add/schema.json +++ b/packages/java/lib/keystore/add/schema.json @@ -26,7 +26,7 @@ "description": "Path to OpenSSl command line tool." }, "parent": { - "$ref": "module://@nikitajs/core/lib/actions/fs/mkdir#/definitions/config/properties/parent" + "$ref": "module://@nikitajs/core/actions/fs/mkdir#/definitions/config/properties/parent" }, "keystore": { "type": "string", @@ -61,7 +61,7 @@ }, "keypass": { "type": "string", - "description": "Password used to protect the certigficate and its key access\ninside the keystore." + "description": "Password used to protect the certificate and its key access\ninside the keystore." 
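Every converted action module in this patch, including the new `keystore/add` implementation above, follows the same ESM shape: a JSON import assertion for the schema and a default export for the action object. Note that `assert { type: "json" }` is the import-assertion syntax current at the time of the patch; later Node.js releases favour the `with { type: "json" }` attribute form (exact version support varies). A minimal sketch of the pattern, not tied to any particular action:

```js
// Dependencies
import definitions from "./schema.json" assert { type: "json" };
// Newer runtimes spell the same thing as an import attribute:
//   import definitions from "./schema.json" with { type: "json" };

// Action
export default {
  handler: async function ({ config }) {
    // ...action logic goes here...
    return { $status: true };
  },
  metadata: { definitions: definitions },
};
```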
     },
     "name": {
       "type": "string",
diff --git a/packages/java/lib/keystore/exists/README.md b/packages/java/lib/keystore/exists/README.md
new file mode 100644
index 000000000..2c505b29f
--- /dev/null
+++ b/packages/java/lib/keystore/exists/README.md
@@ -0,0 +1,4 @@
+
+# `nikita.java.keystore.exists`
+
+Check if a given alias is registered inside a keystore.
diff --git a/packages/java/lib/keystore/exists/index.js b/packages/java/lib/keystore/exists/index.js
new file mode 100644
index 000000000..a9374d622
--- /dev/null
+++ b/packages/java/lib/keystore/exists/index.js
@@ -0,0 +1,43 @@
+// Dependencies
+import dedent from "dedent";
+import utils from "@nikitajs/core/utils";
+import { escapeshellarg as esa } from "@nikitajs/core/utils/string";
+import definitions from "./schema.json" assert { type: "json" };
+
+// Action
+export default {
+  handler: async function ({ config }) {
+    const { $status } = await this.execute({
+      bash: true,
+      command: dedent`
+        # Detect keytool command
+        keytoolbin=\`command -v ${esa(config.keytool)}\` || {
+          if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool';
+          elif [ -x /opt/java/openjdk/bin/keytool ]; then keytoolbin='/opt/java/openjdk/bin/keytool';
+          else exit 44; fi
+        }
+        $keytoolbin -list \
+          -keystore ${esa(config.keystore)} \
+          -storepass ${esa(config.storepass)} \
+          -alias ${esa(config.name)}
+      `,
+      trap: true,
+      code: [0, 1],
+    }).catch((error) => {
+      if (error.exit_code === 44) {
+        throw utils.error("NIKITA_JAVA_KEYTOOL_NOT_FOUND", [
+          "Keytool command not detected,",
+          `searched ${JSON.stringify(config.keytool)}`,
+          ', "/usr/java/default/bin/keytool"',
+          ', and "/opt/java/openjdk/bin/keytool."',
+        ]);
+      }
+      throw error;
+    });
+    return { exists: $status };
+  },
+  metadata: {
+    shy: true,
+    definitions: definitions,
+  },
+};
diff --git a/packages/java/lib/keystore/exists/schema.json b/packages/java/lib/keystore/exists/schema.json
new file mode 100644
index 000000000..ab7f05a46
--- /dev/null
+++ b/packages/java/lib/keystore/exists/schema.json
@@ -0,0 +1,29 @@
+{
+  "config": {
+    "type": "object",
+    "properties": {
+      "keytool": {
+        "type": "string",
+        "default": "keytool",
+        "description": "Path to the `keytool` command, detected from `$PATH` by default."
+      },
+      "keystore": {
+        "type": "string",
+        "description": "Path to the keystore."
+      },
+      "storepass": {
+        "type": "string",
+        "description": "Password to manage the keystore."
+      },
+      "name": {
+        "type": "string",
+        "description": "Name (aka alias) under which the certificate is referenced inside the keystore."
+      }
+    },
+    "required": [
+      "keystore",
+      "storepass",
+      "name"
+    ]
+  }
+}
diff --git a/packages/java/lib/keystore_remove/README.md b/packages/java/lib/keystore/remove/README.md
similarity index 80%
rename from packages/java/lib/keystore_remove/README.md
rename to packages/java/lib/keystore/remove/README.md
index 03a510134..981187011 100644
--- a/packages/java/lib/keystore_remove/README.md
+++ b/packages/java/lib/keystore/remove/README.md
@@ -1,5 +1,5 @@
 
-# `nikita.java.keystore_remove`
+# `nikita.java.keystore.remove`
 
 Remove certificates, private keys and certificate authorities from java
 keystores and truststores.
@@ -7,7 +7,7 @@ keystores and truststores.
 ## Removing a key and its certificate
 
 ```js
-const {$status} = await nikita.java.keystore_remove([{
+const {$status} = await nikita.java.keystore.remove([{
   keystore: java_home + '/lib/security/cacerts',
   storepass: 'changeit',
   caname: 'my_ca_certificate',
@@ -20,7 +20,7 @@ console.info(`Key and its certificate were updated: ${$status}`)
 ## Removing a certificate authority
 
 ```js
-const {$status} = await nikita.java.keystore_remove([{
+const {$status} = await nikita.java.keystore.remove([{
   keystore: java_home + '/lib/security/cacerts',
   storepass: 'changeit',
   caname: 'my_ca_certificate'
diff --git a/packages/java/lib/keystore/remove/index.js b/packages/java/lib/keystore/remove/index.js
new file mode 100644
index 000000000..0d465163c
--- /dev/null
+++ b/packages/java/lib/keystore/remove/index.js
@@ -0,0 +1,47 @@
+// Dependencies
+import dedent from "dedent";
+import utils from "@nikitajs/core/utils";
+import definitions from "./schema.json" assert { type: "json" };
+
+// Action
+export default {
+  handler: async function({config}) {
+    const aliases = [...config.caname, ...config.name].join(' ').trim();
+    await this.execute({
+      bash: true,
+      command: dedent`
+        # Detect keytool command
+        keytoolbin=\`command -v ${config.keytool}\` || {
+          if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool';
+          elif [ -x /opt/java/openjdk/bin/keytool ]; then keytoolbin='/opt/java/openjdk/bin/keytool';
+          else exit 43; fi
+        }
+        # Nothing to do if not a file
+        test -f "${config.keystore}" || exit 3
+        count=0
+        for alias in ${aliases}; do
+          if $keytoolbin -list -keystore "${config.keystore}" -storepass "${config.storepass}" -alias "$alias"; then
+            $keytoolbin -delete -keystore "${config.keystore}" -storepass "${config.storepass}" -alias "$alias"
+            (( count++ ))
+          fi
+        done
+        [ $count -eq 0 ] && exit 3
+        exit 0
+      `,
+      code: [0, 3]
+    }).catch((error) => {
+      if (error.exit_code === 43) {
+        throw utils.error("NIKITA_JAVA_KEYTOOL_NOT_FOUND", [
+          "Keytool command not detected,",
+          `searched ${JSON.stringify(config.keytool)}`,
+          ', "/usr/java/default/bin/keytool"',
+          ', and "/opt/java/openjdk/bin/keytool."',
+        ]);
+      }
+      throw error;
+    });
+  },
+  metadata: {
+    definitions: definitions
+  }
+};
diff --git a/packages/java/lib/keystore_remove/schema.json b/packages/java/lib/keystore/remove/schema.json
similarity index 94%
rename from packages/java/lib/keystore_remove/schema.json
rename to packages/java/lib/keystore/remove/schema.json
index 4301d5fc4..7f529d3ac 100644
--- a/packages/java/lib/keystore_remove/schema.json
+++ b/packages/java/lib/keystore/remove/schema.json
@@ -19,7 +19,8 @@
       "description": "Alias of the certificate authority (CA)."
     },
     "keytool": {
-      "type": "boolean",
+      "type": "string",
+      "default": "keytool",
       "description": "Path to the `keytool` command, detetected from `$PATH` by default."
     },
     "keystore": {
diff --git a/packages/java/lib/keystore_add/index.js b/packages/java/lib/keystore_add/index.js
deleted file mode 100644
index 2b8a60371..000000000
--- a/packages/java/lib/keystore_add/index.js
+++ /dev/null
@@ -1,198 +0,0 @@
-// Dependencies
-const dedent = require('dedent');
-const definitions = require('./schema.json');
-
-// Action
-module.exports = {
-  handler: async function ({
-    config,
-    ssh,
-    metadata: { tmpdir },
-    tools: { path },
-  }) {
-    var err, files;
-    // Update paths in case of download
-    files = {
-      cert:
-        ssh && config.local && config.cert != null
-          ? `${tmpdir}/${path.local.basename(config.cert)}`
-          : config.cert,
-      cacert:
-        ssh && config.local && config.cacert != null
-          ?
`${tmpdir}/${path.local.basename(config.cacert)}` - : config.cacert, - key: - ssh && config.local && config.key != null - ? `${tmpdir}/${path.local.basename(config.key)}` - : config.key, - }; - // Temporary directory - // Used to upload certificates and to isolate certificates from their file - if (tmpdir) { - await this.fs.mkdir({ - $shy: true, - target: tmpdir, - mode: 0o0700, - }); - } - // Upload certificates - if (ssh && config.local && config.cacert) { - await this.file.download({ - $shy: true, - source: config.cacert, - target: files.cacert, - mode: 0o0600, - }); - } - if (ssh && config.local && config.cert) { - await this.file.download({ - $shy: true, - source: config.cert, - target: files.cert, - mode: 0o0600, - }); - } - if (ssh && config.local && config.key) { - await this.file.download({ - $shy: true, - source: config.key, - target: files.key, - mode: 0o0600, - }); - } - // Prepare parent directory - await this.fs.mkdir({ - parent: config.parent, - target: path.dirname(config.keystore), - }); - try { - if (!!config.cert) { - await this.execute({ - bash: true, - command: ` -# Detect openssl command -opensslbin=\`command -v ${config.openssl}\` || { - echo 'OpenSSL command line tool not detected'; exit 4 -} -# Detect keytool command -command -v ${config.keytool} >/dev/null || { - if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool'; - else exit 7; fi -} -keytoolbin=\`command -v ${config.keytool}\` - - -#opensslbin=/usr/bin/openssl # OK -#opensslbin=/opt/homebrew/bin/openssl # KO - -#keytoolbin=/run/current-system/sw/bin/keytool -echo "************ $keytoolbin $opensslbin" -[ -f ${files.cert} ] || (exit 6) -user=\`$opensslbin x509 -noout -in "${files.cert}" -sha1 -fingerprint | sed 's/\\(.*\\)=\\(.*\\)/\\2/'\` -# We are only retrieving the first certificate found in the chain with \`head -n 1\` -keystore=\`$keytoolbin -list -v -keystore ${config.keystore} -storepass ${config.storepass} -alias ${config.name} | grep SHA1: | head -n 1 | sed -E 's/.+SHA1: +(.*)/\\1/'\` -echo "User Certificate: $user" -echo "Keystore Certificate: $keystore" -if [ "$user" = "$keystore" ]; then exit 5; fi -# Create a PKCS12 file that contains key and certificate -$opensslbin pkcs12 -export -in "${files.cert}" -inkey "${files.key}" -out "${tmpdir}/pkcs12" -name ${config.name} -password pass:${config.keypass} -# Import PKCS12 into keystore -$keytoolbin -noprompt -importkeystore -destkeystore ${config.keystore} -deststorepass ${config.storepass} -destkeypass ${config.keypass} -srckeystore "${tmpdir}/pkcs12" -srcstoretype PKCS12 -srcstorepass ${config.keypass} -alias ${config.name}`, - trap: true, - code: [ - 0, - 5, // OpenSSL exit 3 if file does not exists - ], - }); - } - } catch (error) { - err = error; - if (err.exit_code === 4) { - throw Error("OpenSSL command line tool not detected"); - } - if (err.exit_code === 6) { - throw Error("Keystore file does not exists"); - } - if (err.exit_code === 6) { - throw Error("Missing Requirement: command keytool is not detected"); - } - throw err; - } - try { - // Deal with CACert - if (config.cacert) { - await this.execute({ - bash: true, - command: `# Detect keytool command -keytoolbin=${config.keytool} -command -v $keytoolbin >/dev/null || { - if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool'; - else exit 7; fi -} -# Check password -if [ -f ${config.keystore} ] && ! 
\${keytoolbin} -list -keystore ${config.keystore} -storepass ${config.storepass} >/dev/null; then - # Keystore password is invalid, change it manually with: - # keytool -storepasswd -keystore ${config.keystore} -storepass \${old_pasword} -new ${config.storepass} - exit 2 -fi -[ -f ${files.cacert} ] || (echo 'CA file doesnt not exists: ${files.cacert} 1>&2'; exit 3) -# Import CACert -PEM_FILE=${files.cacert} -CERTS=$(grep 'END CERTIFICATE' $PEM_FILE| wc -l) -code=5 -for N in $(seq 0 $(($CERTS - 1))); do - if [ $CERTS -eq '1' ]; then - ALIAS="${config.caname}" - else - ALIAS="${config.caname}-$N" - fi - # Isolate cert into a file - CACERT_FILE=${tmpdir}/$ALIAS - cat $PEM_FILE | awk "n==$N { print }; /END CERTIFICATE/ { n++ }" > $CACERT_FILE - # Read user CACert signature - user=\`${config.openssl} x509 -noout -in "$CACERT_FILE" -sha1 -fingerprint | sed 's/\\(.*\\)=\\(.*\\)/\\2/'\` - # Read registered CACert signature - keystore=\`\${keytoolbin} -list -v -keystore ${config.keystore} -storepass ${config.storepass} -alias $ALIAS | grep SHA1: | sed -E 's/.+SHA1: +(.*)/\\1/'\` - echo "User CA Cert: $user" - echo "Keystore CA Cert: $keystore" - if [ "$user" = "$keystore" ]; then echo 'Identical Signature'; code=5; continue; fi - # Remove CACert if signature doesnt match - if [ "$keystore" != "" ]; then - \${keytoolbin} -delete -keystore ${config.keystore} -storepass ${config.storepass} -alias $ALIAS - fi - \${keytoolbin} -noprompt -import -trustcacerts -keystore ${config.keystore} -storepass ${config.storepass} -alias $ALIAS -file ${tmpdir}/$ALIAS - code=0 -done -exit $code`, - trap: true, - code: [0, 5], - }); - } - } catch (error) { - err = error; - if (err.exit_code === 3) { - throw Error(`CA file does not exist: ${files.cacert}`); - } - throw err; - } - // Ensure ownerships and permissions - if (config.uid != null || config.gid != null) { - await this.fs.chown({ - target: config.keystore, - uid: config.uid, - gid: config.gid, - }); - } - if (config.mode != null) { - await this.fs.chmod({ - target: config.keystore, - mode: config.mode, - }); - } - return void 0; - }, - metadata: { - tmpdir: true, - definitions: definitions, - }, -}; diff --git a/packages/java/lib/keystore_remove/index.js b/packages/java/lib/keystore_remove/index.js deleted file mode 100644 index 45f3a5ca5..000000000 --- a/packages/java/lib/keystore_remove/index.js +++ /dev/null @@ -1,39 +0,0 @@ -// Dependencies -const definitions = require('./schema.json'); - -// Action -module.exports = { - handler: async function({config}) { - var aliases; - // config.caname = [config.caname] unless Array.isArray config.caname - // config.name = [config.name] unless Array.isArray config.name - aliases = [...config.caname, ...config.name].join(' ').trim(); - if (config.keytool == null) { - config.keytool = 'keytool'; - } - return (await this.execute({ - bash: true, - command: `# Detect keytool command - keytoolbin=${config.keytool} - command -v $keytoolbin >/dev/null || { - if [ -x /usr/java/default/bin/keytool ]; then keytoolbin='/usr/java/default/bin/keytool'; - else exit 7; fi - } - test -f "${config.keystore}" || # Nothing to do if not a file - exit 3 - count=0 - for alias in ${aliases}; do - if \${keytoolbin} -list -keystore "${config.keystore}" -storepass "${config.storepass}" -alias "$alias"; then - \${keytoolbin} -delete -keystore "${config.keystore}" -storepass "${config.storepass}" -alias "$alias" - (( count++ )) - fi - done - [ $count -eq 0 ] && exit 3 - exit 0`, - code: [0, 3] - })); - }, - metadata: { - definitions: definitions - 
} -}; diff --git a/packages/java/lib/register.js b/packages/java/lib/register.js index 06e46d22d..f38ce4e36 100644 --- a/packages/java/lib/register.js +++ b/packages/java/lib/register.js @@ -1,21 +1,16 @@ - // Dependencies -require('@nikitajs/file/lib/register'); -const registry = require('@nikitajs/core/lib/registry'); +import "@nikitajs/file/register"; +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { java: { - keystore_add: '@nikitajs/java/lib/keystore_add', - keystore_remove: '@nikitajs/java/lib/keystore_remove' - } + keystore: { + exists: "@nikitajs/java/keystore/exists", + add: "@nikitajs/java/keystore/add", + remove: "@nikitajs/java/keystore/remove", + }, + }, }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions); diff --git a/packages/java/package.json b/packages/java/package.json index 3617284d0..261ff334d 100644 --- a/packages/java/package.json +++ b/packages/java/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/java", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions to work with Java keystores and truststores.", "keywords": [ "nikita", @@ -11,7 +12,6 @@ "java", "keystore" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -54,20 +54,24 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/java/lib/register" - ], "inline-diffs": true, - "timeout": 20000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/java/register", + "should" + ], + "throw-deprecation": true, + "timeout": 20000 }, "publishConfig": { "access": "public" @@ -80,7 +84,9 @@ "directory": "packages/java" }, "scripts": { - "test": "npm run test:local", + "test": "npm run test:local && npm run test:env", + "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/java/test.sample.coffee b/packages/java/test.sample.coffee index 8efed6669..c43cc3459 100644 --- a/packages/java/test.sample.coffee +++ b/packages/java/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: posix: true config: [ @@ -12,5 +12,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/java/test/keystore_add.coffee b/packages/java/test/keystore/add.coffee similarity index 61% rename from packages/java/test/keystore_add.coffee rename to packages/java/test/keystore/add.coffee index 5e8181aed..307281883 100644 --- a/packages/java/test/keystore_add.coffee +++ b/packages/java/test/keystore/add.coffee @@ -1,33 +1,34 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) +__dirname = new URL( '.', import.meta.url).pathname -return unless tags.posix - -describe 
'java.keystore_add', -> +describe 'java.keystore.add', -> + return unless test.tags.java describe 'schema', -> it 'cacert implies caname', -> - await nikita.java.keystore_add + await nikita.java.keystore.add $handler: (->) keystore: "ok" storepass: "ok" cacert: "ok" caname: 'ok' - await nikita.java.keystore_add + await nikita.java.keystore.add keystore: "ok" storepass: "ok" cacert: "implies caname" .should.be.rejectedWith [ 'NIKITA_SCHEMA_VALIDATION_CONFIG:' - 'one error was found in the configuration of action `java.keystore_add`:' + 'one error was found in the configuration of action `java.keystore.add`:' '#/dependencies/cacert/required config must have required property \'caname\'.' ].join ' ' it 'cert implies key, keypass and name', -> - await nikita.java.keystore_add + await nikita.java.keystore.add $handler: (->) keystore: "ok" storepass: "ok" @@ -35,13 +36,13 @@ describe 'java.keystore_add', -> key: "ok" keypass: "ok" name: 'ok' - await nikita.java.keystore_add + await nikita.java.keystore.add keystore: "ok" storepass: "ok" cert: "implies key, keypass and name" .should.be.rejectedWith [ 'NIKITA_SCHEMA_VALIDATION_CONFIG:' - 'multiple errors were found in the configuration of action `java.keystore_add`:' + 'multiple errors were found in the configuration of action `java.keystore.add`:' '#/dependencies/cert/required config must have required property \'key\';' '#/dependencies/cert/required config must have required property \'keypass\';' '#/dependencies/cert/required config must have required property \'name\'.' @@ -54,14 +55,14 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_caname" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - cert: "#{__dirname}/keystore/certs1/node_1_cert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + cert: "#{__dirname}/../resources/certs1/node_1_cert.pem" name: "my_name" - key: "#{__dirname}/keystore/certs1/node_1_key.pem" + key: "#{__dirname}/../resources/certs1/node_1_key.pem" keypass: 'mypassword' $status.should.be.true() @@ -72,11 +73,11 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" $status.should.be.true() they 'create parent directory', ({ssh}) -> @@ -84,29 +85,29 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/a/dir/cacerts" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" $status.should.be.true() they 'detect existing cacert signature', ({ssh}) -> nikita - $ssh: null + $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - await @java.keystore_add + await @java.keystore.add $shy: true keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - {$status} = await @java.keystore_add + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: 
"#{__dirname}/keystore/certs1/cacert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" $status.should.be.false() they 'update a new cacert with same alias', ({ssh}) -> @@ -114,28 +115,32 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - await @java.keystore_add + await @java.keystore.add $shy: true keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - {$status} = await @java.keystore_add + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs2/cacert.pem" + cacert: "#{__dirname}/../resources/certs2/cacert.pem" $status.should.be.true() - await @execute.assert - command: "keytool -list -keystore #{tmpdir}/keystore -storepass changeit -alias my_alias" - content: /^my_alias,/m + await @java.keystore.exists + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + name: "my_alias" + .then(({exists}) => exists) + .should.be.finally.equal true + they 'fail if CA file does not exist', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @java.keystore_add + @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" @@ -152,33 +157,46 @@ describe 'java.keystore_add', -> command: """ mkdir #{tmpdir}/tmp cd #{tmpdir}/tmp + # Generate 1st intermediate certificate key and CSR ("ca_int1.key.pem" and "ca_int1.req") openssl req -new -nodes -out ca_int1.req -keyout ca_int1.key.pem -subj /CN=CAIntermediate1 -newkey rsa:2048 -sha512 - openssl x509 -req -in ca_int1.req -CAkey #{__dirname}/keystore/certs1/cacert_key.pem -CA #{__dirname}/keystore/certs1/cacert.pem -days 20 -set_serial 01 -sha512 -out ca_int1.cert.pem + # Sign 1st intermediate certificate with certs1 ("ca_int1.cert.pem") + openssl x509 -req -in ca_int1.req -CAkey #{__dirname}/../resources/certs1/cacert_key.pem -CA #{__dirname}/../resources/certs1/cacert.pem -days 20 -set_serial 01 -sha512 -out ca_int1.cert.pem + # Generate 2nd intermediate certificate key and CSR ("ca_int2.key.pem" and "ca_int2.req") openssl req -new -nodes -out ca_int2.req -keyout ca_int2.key.pem -subj /CN=CAIntermediate2 -newkey rsa:2048 -sha512 + # Sign 2nd intermediate certificate with 1st intermediate certificate ("ca_int2.cert.pem") openssl x509 -req -in ca_int2.req -CAkey ca_int1.key.pem -CA ca_int1.cert.pem -days 20 -set_serial 01 -sha512 -out ca_int2.cert.pem - cat #{__dirname}/keystore/certs1/cacert.pem ca_int1.cert.pem ca_int2.cert.pem > ca.cert.pem + cat #{__dirname}/../resources/certs1/cacert.pem ca_int1.cert.pem ca_int2.cert.pem > ca.cert.pem """ - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" cacert: "#{tmpdir}/tmp/ca.cert.pem" $status.should.be.true() - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" cacert: "#{tmpdir}/tmp/ca.cert.pem" $status.should.be.false() - await @execute.assert - command: "keytool -list -keystore #{tmpdir}/keystore -storepass changeit -alias my_alias-0" - content: /^my_alias-0,/m - await @execute.assert - command: "keytool -list -keystore #{tmpdir}/keystore -storepass changeit -alias my_alias-1" - content: /^my_alias-1,/m - await @execute.assert - command: "keytool -list -keystore #{tmpdir}/keystore -storepass changeit 
-alias my_alias-2" - content: /^my_alias-2,/m + await @java.keystore.exists + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + name: "my_alias-0" + .then(({exists}) => exists) + .should.be.finally.equal true + await @java.keystore.exists + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + name: "my_alias-1" + .then(({exists}) => exists) + .should.be.finally.equal true + await @java.keystore.exists + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + name: "my_alias-2" + .then(({exists}) => exists) + .should.be.finally.equal true they 'honors status with certificate chain', ({ssh}) -> nikita @@ -190,21 +208,21 @@ describe 'java.keystore_add', -> mkdir #{tmpdir}/ca cd #{tmpdir}/ca openssl req -new -nodes -out ca_int1.req -keyout ca_int1.key.pem -subj /CN=CAIntermediate1 -newkey rsa:2048 -sha512 - openssl x509 -req -in ca_int1.req -CAkey #{__dirname}/keystore/certs1/cacert_key.pem -CA #{__dirname}/keystore/certs1/cacert.pem -days 20 -set_serial 01 -sha512 -out ca_int1.cert.pem + openssl x509 -req -in ca_int1.req -CAkey #{__dirname}/../resources/certs1/cacert_key.pem -CA #{__dirname}/../resources/certs1/cacert.pem -days 20 -set_serial 01 -sha512 -out ca_int1.cert.pem openssl req -new -nodes -out ca_int2a.req -keyout ca_int2a.key.pem -subj /CN=CAIntermediate2 -newkey rsa:2048 -sha512 openssl x509 -req -in ca_int2a.req -CAkey ca_int1.key.pem -CA ca_int1.cert.pem -days 20 -set_serial 01 -sha512 -out ca_int2a.cert.pem - cat #{__dirname}/keystore/certs1/cacert.pem ca_int1.cert.pem ca_int2a.cert.pem > ca.a.cert.pem + cat #{__dirname}/../resources/certs1/cacert.pem ca_int1.cert.pem ca_int2a.cert.pem > ca.a.cert.pem openssl req -new -nodes -out ca_int2b.req -keyout ca_int2b.key.pem -subj /CN=CAIntermediate2 -newkey rsa:2048 -sha512 openssl x509 -req -in ca_int2b.req -CAkey ca_int1.key.pem -CA ca_int1.cert.pem -days 20 -set_serial 01 -sha512 -out ca_int2b.cert.pem - cat #{__dirname}/keystore/certs1/cacert.pem ca_int1.cert.pem ca_int2b.cert.pem > ca.b.cert.pem + cat #{__dirname}/../resources/certs1/cacert.pem ca_int1.cert.pem ca_int2b.cert.pem > ca.b.cert.pem """ - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" cacert: "#{tmpdir}/ca/ca.a.cert.pem" $status.should.be.true() - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" @@ -218,13 +236,13 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - key: "#{__dirname}/keystore/certs1/node_1_key.pem" - cert: "#{__dirname}/keystore/certs1/node_1_cert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + key: "#{__dirname}/../resources/certs1/node_1_key.pem" + cert: "#{__dirname}/../resources/certs1/node_1_cert.pem" keypass: 'mypassword' name: 'node_1' $status.should.be.true() @@ -234,23 +252,23 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - key: "#{__dirname}/keystore/certs1/node_1_key.pem" - cert: "#{__dirname}/keystore/certs1/node_1_cert.pem" + cacert: 
"#{__dirname}/../resources/certs1/cacert.pem" + key: "#{__dirname}/../resources/certs1/node_1_key.pem" + cert: "#{__dirname}/../resources/certs1/node_1_cert.pem" keypass: 'mypassword' name: 'node_1' $status.should.be.true() - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - key: "#{__dirname}/keystore/certs1/node_1_key.pem" - cert: "#{__dirname}/keystore/certs1/node_1_cert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + key: "#{__dirname}/../resources/certs1/node_1_key.pem" + cert: "#{__dirname}/../resources/certs1/node_1_cert.pem" keypass: 'mypassword' name: 'node_1' $status.should.be.false() @@ -260,22 +278,22 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - await @java.keystore_add + await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - key: "#{__dirname}/keystore/certs1/node_1_key.pem" - cert: "#{__dirname}/keystore/certs1/node_1_cert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + key: "#{__dirname}/../resources/certs1/node_1_key.pem" + cert: "#{__dirname}/../resources/certs1/node_1_cert.pem" keypass: 'mypassword' name: 'node_1' - {$status} = await @java.keystore_add + {$status} = await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs2/cacert.pem" - key: "#{__dirname}/keystore/certs2/node_1_key.pem" - cert: "#{__dirname}/keystore/certs2/node_1_cert.pem" + cacert: "#{__dirname}/../resources/certs2/cacert.pem" + key: "#{__dirname}/../resources/certs2/node_1_key.pem" + cert: "#{__dirname}/../resources/certs2/node_1_cert.pem" keypass: 'mypassword' name: 'node_1' $status.should.be.true() @@ -287,16 +305,16 @@ describe 'java.keystore_add', -> $ssh: ssh tmpdir: true , ({metadata: {tmpdir}}) -> - await @java.keystore_add + await @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - {$status} = await @java.keystore_add - keystore: "#{tmpdir}/keystore" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + {$status} = await @java.keystore.add + keystore: "#{tmpdir}/resources" storepass: "changednow" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" $status.should.be.true() describe 'config openssl', -> @@ -306,15 +324,15 @@ describe 'java.keystore_add', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @java.keystore_add + @java.keystore.add keystore: "#{tmpdir}/keystore" storepass: "changeit" caname: "my_alias" - cacert: "#{__dirname}/keystore/certs2/cacert.pem" - key: "#{__dirname}/keystore/certs2/node_1_key.pem" - cert: "#{__dirname}/keystore/certs2/node_1_cert.pem" + cacert: "#{__dirname}/../resources/certs2/cacert.pem" + key: "#{__dirname}/../resources/certs2/node_1_key.pem" + cert: "#{__dirname}/../resources/certs2/node_1_cert.pem" keypass: 'mypassword' openssl: '/doesnt/not/exists' name: 'node_1' .should.be.rejectedWith - message: 'OpenSSL command line tool not detected' + message: 'OpenSSL command line tool not detected.' 
diff --git a/packages/java/test/keystore/exists.coffee b/packages/java/test/keystore/exists.coffee new file mode 100644 index 000000000..d5ed22c08 --- /dev/null +++ b/packages/java/test/keystore/exists.coffee @@ -0,0 +1,39 @@ + +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) +__dirname = new URL( '.', import.meta.url).pathname + +describe 'java.keystore.exists', -> + return unless test.tags.java + + they 'with existing alias', ({ssh}) -> + nikita + $ssh: ssh + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @java.keystore.add + $shy: true + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + caname: "my_alias" + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + await @java.keystore.exists + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + name: "my_alias" + .then(({exists}) => exists) + .should.be.finally.equal true + + they 'with missing alias', ({ssh}) -> + nikita + $ssh: ssh + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @java.keystore.exists + keystore: "#{tmpdir}/keystore" + storepass: "changeit" + name: "my_alias" + .then(({exists}) => exists) + .should.be.finally.equal false diff --git a/packages/java/test/keystore/remove.coffee b/packages/java/test/keystore/remove.coffee new file mode 100644 index 000000000..a58803ff5 --- /dev/null +++ b/packages/java/test/keystore/remove.coffee @@ -0,0 +1,120 @@ + +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) +__dirname = new URL( '.', import.meta.url).pathname + +describe 'java.keystore.remove', -> + return unless test.tags.java + + describe 'schema and config', -> + + it 'either name of cname is required', -> + await nikita.java.keystore.remove + $handler: (->) + keystore: "ok" + storepass: "ok" + caname: "ok" + .should.be.fulfilled() + await nikita.java.keystore.remove + $handler: (->) + keystore: "ok" + storepass: "ok" + name: "ok" + .should.be.fulfilled() + await nikita.java.keystore.remove + keystore: "ok" + storepass: "ok" + .should.be.rejectedWith [ + 'NIKITA_SCHEMA_VALIDATION_CONFIG:' + 'multiple errors were found in the configuration of action `java.keystore.remove`:' + '#/definitions/config/anyOf config must match a schema in anyOf;' + '#/definitions/config/anyOf/0/required config must have required property \'name\';' + '#/definitions/config/anyOf/1/required config must have required property \'caname\'.' 
+ ].join ' ' + + they 'keystore doesnt need to exists', ({ssh}) -> + nikita + $ssh: ssh + $tmpdir: true + , ({metadata: {tmpdir}}) -> + {$status} = await @java.keystore.remove + keystore: "#{tmpdir}/does/not/exist" + storepass: "invalid" + caname: "invalid" + $status.should.be.false() + + they 'caname and name are provided', ({ssh}) -> + nikita + $ssh: ssh + , -> + await @java.keystore.remove + keystore: "invalid" + storepass: "invalid" + caname: "my_caname" + name: "my_name" + + describe 'cacert', -> + + they 'remove cacerts', ({ssh}) -> + nikita + $ssh: ssh + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @java.keystore.add + keystore: "#{tmpdir}/cacerts" + storepass: 'changeit' + caname: 'my_alias' + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + {$status} = await @java.keystore.remove + keystore: "#{tmpdir}/cacerts" + storepass: 'changeit' + caname: 'my_alias' + $status.should.be.true() + {$status} = await @java.keystore.remove + keystore: "#{tmpdir}/cacerts" + storepass: 'changeit' + caname: 'my_alias' + $status.should.be.false() + await @java.keystore.exists + keystore: "#{tmpdir}/cacerts" + storepass: "changeit" + name: "my_alias" + .then(({exists}) => exists) + .should.be.finally.equal false + + describe 'key', -> + + they 'remove cacerts file', ({ssh}) -> + nikita + $ssh: ssh + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @java.keystore.add + keystore: "#{tmpdir}/cacerts" + storepass: 'changeit' + caname: 'my_alias' + cacert: "#{__dirname}/../resources/certs1/cacert.pem" + key: "#{__dirname}/../resources/certs1/node_1_key.pem" + cert: "#{__dirname}/../resources/certs1/node_1_cert.pem" + keypass: 'mypassword' + name: 'node_1' + {$status} = await @java.keystore.remove + keystore: "#{tmpdir}/cacerts" + storepass: 'changeit' + name: 'node_1' + keypass: 'mypassword' + $status.should.be.true() + {$status} = await @java.keystore.remove + keystore: "#{tmpdir}/cacerts" + storepass: 'changeit' + name: 'node_1' + keypass: 'mypassword' + $status.should.be.false() + await @java.keystore.exists + keystore: "#{tmpdir}/cacerts" + storepass: "changeit" + name: "node_1" + .then(({exists}) => exists) + .should.be.finally.equal false diff --git a/packages/java/test/keystore_remove.coffee b/packages/java/test/keystore_remove.coffee deleted file mode 100644 index 511513358..000000000 --- a/packages/java/test/keystore_remove.coffee +++ /dev/null @@ -1,125 +0,0 @@ - -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix - -describe 'java.keystore_remove', -> - - describe 'schema and config', -> - - it 'either name of cname is required', -> - await nikita.java.keystore_remove - $handler: (->) - keystore: "ok" - storepass: "ok" - caname: "ok" - .should.be.fulfilled() - await nikita.java.keystore_remove - $handler: (->) - keystore: "ok" - storepass: "ok" - name: "ok" - .should.be.fulfilled() - await nikita.java.keystore_remove - keystore: "ok" - storepass: "ok" - .should.be.rejectedWith [ - 'NIKITA_SCHEMA_VALIDATION_CONFIG:' - 'multiple errors were found in the configuration of action `java.keystore_remove`:' - '#/definitions/config/anyOf config must match a schema in anyOf;' - '#/definitions/config/anyOf/0/required config must have required property \'name\';' - '#/definitions/config/anyOf/1/required config must have required property \'caname\'.' 
- ].join ' ' - - they 'keystore doesnt need to exists', ({ssh}) -> - nikita - $ssh: ssh - $tmpdir: true - , ({metadata: {tmpdir}}) -> - {$status} = await @java.keystore_remove - keystore: "#{tmpdir}/does/not/exist" - storepass: "invalid" - caname: "invalid" - $status.should.be.false() - - they 'caname and name are provided', ({ssh}) -> - nikita - $ssh: ssh - , -> - await @java.keystore_remove - keystore: "invalid" - storepass: "invalid" - caname: "my_caname" - name: "my_name" - - describe 'cacert', -> - - they 'remove cacerts', ({ssh}) -> - nikita - $ssh: ssh - $tmpdir: true - , ({metadata: {tmpdir}}) -> - keystore = "#{tmpdir}/cacerts" - caname = 'my_alias' - storepass = 'changeit' - await @java.keystore_add - keystore: "#{keystore}" - storepass: "#{storepass}" - caname: "#{caname}" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - {$status} = await @java.keystore_remove - keystore: "#{keystore}" - storepass: "#{storepass}" - caname: "#{caname}" - $status.should.be.true() - {$status} = await @java.keystore_remove - keystore: "#{keystore}" - storepass: "#{storepass}" - caname: "#{caname}" - $status.should.be.false() - await @execute - command: """ - keytool -list -keystore #{keystore} -storepass #{storepass} -alias #{caname} - """ - code: 1 - - describe 'key', -> - - they 'remove cacerts file', ({ssh}) -> - nikita - $ssh: ssh - $tmpdir: true - , ({metadata: {tmpdir}}) -> - keystore = "#{tmpdir}/cacerts" - caname = 'my_alias' - storepass = 'changeit' - keypass = 'mypassword' - name = 'node_1' - await @java.keystore_add - keystore: "#{keystore}" - storepass: "#{storepass}" - caname: "#{caname}" - cacert: "#{__dirname}/keystore/certs1/cacert.pem" - key: "#{__dirname}/keystore/certs1/node_1_key.pem" - cert: "#{__dirname}/keystore/certs1/node_1_cert.pem" - keypass: 'mypassword' - name: "#{name}" - {$status} = await @java.keystore_remove - keystore: "#{keystore}" - storepass: "#{storepass}" - name: "#{name}" - keypass: "#{keypass}" - $status.should.be.true() - {$status} = await @java.keystore_remove - keystore: "#{keystore}" - storepass: "#{storepass}" - name: "#{name}" - keypass: "#{keypass}" - $status.should.be.false() - await @execute - command: """ - keytool -list -keystore #{keystore} -storepass #{storepass} -alias #{name} - """ - code: 1 diff --git a/packages/java/test/keystore/certs1/cacert.pem b/packages/java/test/resources/certs1/cacert.pem similarity index 100% rename from packages/java/test/keystore/certs1/cacert.pem rename to packages/java/test/resources/certs1/cacert.pem diff --git a/packages/java/test/keystore/certs1/cacert.seq b/packages/java/test/resources/certs1/cacert.seq similarity index 100% rename from packages/java/test/keystore/certs1/cacert.seq rename to packages/java/test/resources/certs1/cacert.seq diff --git a/packages/java/test/keystore/certs1/cacert_key.pem b/packages/java/test/resources/certs1/cacert_key.pem similarity index 100% rename from packages/java/test/keystore/certs1/cacert_key.pem rename to packages/java/test/resources/certs1/cacert_key.pem diff --git a/packages/java/test/keystore/certs1/generate b/packages/java/test/resources/certs1/generate similarity index 100% rename from packages/java/test/keystore/certs1/generate rename to packages/java/test/resources/certs1/generate diff --git a/packages/java/test/keystore/certs1/node_1_cert.pem b/packages/java/test/resources/certs1/node_1_cert.pem similarity index 100% rename from packages/java/test/keystore/certs1/node_1_cert.pem rename to packages/java/test/resources/certs1/node_1_cert.pem diff --git 
a/packages/java/test/keystore/certs1/node_1_key.pem b/packages/java/test/resources/certs1/node_1_key.pem similarity index 100% rename from packages/java/test/keystore/certs1/node_1_key.pem rename to packages/java/test/resources/certs1/node_1_key.pem diff --git a/packages/java/test/keystore/certs2/cacert.pem b/packages/java/test/resources/certs2/cacert.pem similarity index 100% rename from packages/java/test/keystore/certs2/cacert.pem rename to packages/java/test/resources/certs2/cacert.pem diff --git a/packages/java/test/keystore/certs2/cacert.seq b/packages/java/test/resources/certs2/cacert.seq similarity index 100% rename from packages/java/test/keystore/certs2/cacert.seq rename to packages/java/test/resources/certs2/cacert.seq diff --git a/packages/java/test/keystore/certs2/cacert_key.pem b/packages/java/test/resources/certs2/cacert_key.pem similarity index 100% rename from packages/java/test/keystore/certs2/cacert_key.pem rename to packages/java/test/resources/certs2/cacert_key.pem diff --git a/packages/java/test/keystore/certs2/generate b/packages/java/test/resources/certs2/generate similarity index 100% rename from packages/java/test/keystore/certs2/generate rename to packages/java/test/resources/certs2/generate diff --git a/packages/java/test/keystore/certs2/node_1_cert.pem b/packages/java/test/resources/certs2/node_1_cert.pem similarity index 100% rename from packages/java/test/keystore/certs2/node_1_cert.pem rename to packages/java/test/resources/certs2/node_1_cert.pem diff --git a/packages/java/test/keystore/certs2/node_1_key.pem b/packages/java/test/resources/certs2/node_1_key.pem similarity index 100% rename from packages/java/test/keystore/certs2/node_1_key.pem rename to packages/java/test/resources/certs2/node_1_key.pem diff --git a/packages/java/test/test.coffee b/packages/java/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/java/test/test.coffee +++ b/packages/java/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/krb5/README.md b/packages/krb5/README.md index be031387c..3c395cbb8 100644 --- a/packages/krb5/README.md +++ b/packages/krb5/README.md @@ -2,3 +2,21 @@ # Nikita "krb5" package The "krb5" package provides Nikita actions for various Kerberos 5 operations. 
+ +## Usage + +```js +import "@nikitajs/krb5/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.krb5.addprinc({ + principal: "nikita@DOMAIN.COM", + randkey: true, + admin: { + server: "krb5.domain.com", + principal: "admin/admin@DOMAIN.COM", + password: "admin", + }, +}); +console.info("Principal was modified:", $status); +``` diff --git a/packages/krb5/env/krb5/nodejs/Dockerfile b/packages/krb5/env/krb5/nodejs/Dockerfile index 2f7870f79..b1a0673fb 100644 --- a/packages/krb5/env/krb5/nodejs/Dockerfile +++ b/packages/krb5/env/krb5/nodejs/Dockerfile @@ -1,55 +1,45 @@ -FROM centos:7 -MAINTAINER David Worms +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +RUN apt update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo \ + # Java and Kerberos + krb5-user && \ + # SSH configuration + ssh-keygen -A && \ + mkdir -p /run/sshd -# Install Node.js -ENV NODE_VERSION stable -RUN \ - yum install -y git make \ - && curl -L https://git.io/n-install | bash -s -- -y \ - && . ~/.bashrc && n $NODE_VERSION - -# Install SSH -RUN \ - yum install -y openssh-server openssh-clients \ - && ssh-keygen -t rsa -f ~/.ssh/id_rsa -N '' \ - && cat ~/.ssh/id_rsa.pub > ~/.ssh/authorized_keys \ - && ssh-keygen -A - -# Install Java -RUN yum install -y java +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/krb5 # Install Krb5 client -RUN yum install -y krb5-workstation ADD ./krb5.conf /etc/krb5.conf -# Install Misc dependencies -RUN yum install -y zip unzip bzip2 git - -# Install PostgreSQL client -RUN yum install -y postgresql - -## Install Mysql client -RUN yum install -y mysql +# User as sudoer +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +USER nikita -# Install openssl -RUN yum install -y openssl +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys -# Install docker -RUN yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo -RUN yum install -y docker-ce -RUN curl -L https://github.com/docker/compose/releases/download/1.18.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose -RUN chmod +x /usr/local/bin/docker-compose - -# Ruby & Gem -RUN yum install -y gcc ruby ruby-devel - -# Cgroup -RUN yum install -y libcgroup-tools - -RUN yum clean all - -ADD ./entrypoint.sh /entrypoint.sh -RUN mkdir -p /nikita -WORKDIR /nikita/packages/krb5 +# Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +ENV PATH /home/nikita/n/bin:$PATH ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/krb5/env/krb5/nodejs/entrypoint.sh b/packages/krb5/env/krb5/nodejs/entrypoint.sh index 26ef0ed6d..4c25347ed 100755 --- a/packages/krb5/env/krb5/nodejs/entrypoint.sh +++ b/packages/krb5/env/krb5/nodejs/entrypoint.sh @@ -1,28 +1,28 @@ #!/bin/bash set -e -# kadmin NODE.DC1.CONSUL -p admin/admin -s krb5 -w admin -q 'listprincs' +# kadmin -r NODE.DC1.CONSUL -p admin/admin -s krb5 -w admin -q 'listprincs' until echo admin | kinit admin/admin do echo 'waiting for kinit to succeed' - sleep 4 + sleep 2 done +# Source Node.js +. 
~/.bashrc # Start ssh daemon -/usr/sbin/sshd +sudo /usr/sbin/sshd +# We have TTY, so probably an interactive container... if test -t 0; then - # We have TTY, so probably an interactive container... + # Some command(s) has been passed to container? Execute them and exit. + # No commands provided? Run bash. if [[ $@ ]]; then - # Transfer arguments to mocha - . ~/.bashrc - npx mocha $@ + node_modules/.bin/mocha $@ else - # Run bash when no argument export PS1='[\u@\h : \w]\$ ' /bin/bash fi +# Detached mode else - # Detached mode - . ~/.bashrc npm run test:local fi diff --git a/packages/krb5/env/krb5/server/Dockerfile b/packages/krb5/env/krb5/server/Dockerfile index 798a45c8c..bb7cbbf3c 100644 --- a/packages/krb5/env/krb5/server/Dockerfile +++ b/packages/krb5/env/krb5/server/Dockerfile @@ -1,8 +1,5 @@ -# docker build --tag nikita_krb5 . -# Run the container and tail /var/log/kerberos/krb5kdc.log -# docker run --rm -it nikita_krb5 - FROM centos:7 +LABEL org.opencontainers.image.authors="David Worms " # EPEL RUN yum install -y epel-release diff --git a/packages/krb5/env/krb5/test.coffee b/packages/krb5/env/krb5/test.coffee index 4b36dd13d..49e68aee1 100644 --- a/packages/krb5/env/krb5/test.coffee +++ b/packages/krb5/env/krb5/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: krb5: true krb5_addprinc: true @@ -16,5 +16,5 @@ module.exports = label: 'remote' ssh: host: '127.0.0.1', username: process.env.USER, - private_key_path: '~/.ssh/id_rsa' + private_key_path: '~/.ssh/id_ed25519' ] diff --git a/packages/krb5/lib/addprinc/index.js b/packages/krb5/lib/addprinc/index.js index 92d966a22..585e1566a 100644 --- a/packages/krb5/lib/addprinc/index.js +++ b/packages/krb5/lib/addprinc/index.js @@ -1,10 +1,10 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { if (/.*@.*/.test(config.admin?.principal)) { // Normalize realm and principal for later usage of config diff --git a/packages/krb5/lib/addprinc/schema.json b/packages/krb5/lib/addprinc/schema.json index aa05a3d6a..9b8d0d163 100644 --- a/packages/krb5/lib/addprinc/schema.json +++ b/packages/krb5/lib/addprinc/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "admin": { - "$ref": "module://@nikitajs/krb5/lib/execute#/definitions/config/properties/admin" + "$ref": "module://@nikitajs/krb5/execute#/definitions/config/properties/admin" }, "keytab": { "type": "string", diff --git a/packages/krb5/lib/delprinc/index.js b/packages/krb5/lib/delprinc/index.js index 5b2c2ce89..10ab137f8 100644 --- a/packages/krb5/lib/delprinc/index.js +++ b/packages/krb5/lib/delprinc/index.js @@ -1,10 +1,10 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (/.*@.*/.test(config.admin.principal)) { // Normalize realm and principal for later usage of config diff --git a/packages/krb5/lib/delprinc/schema.json b/packages/krb5/lib/delprinc/schema.json index e64fe7732..3e389a284 100644 --- a/packages/krb5/lib/delprinc/schema.json +++ b/packages/krb5/lib/delprinc/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "admin": { - 
"$ref": "module://@nikitajs/krb5/lib/execute#/definitions/config/properties/admin" + "$ref": "module://@nikitajs/krb5/execute#/definitions/config/properties/admin" }, "keytab": { "type": "string", diff --git a/packages/krb5/lib/execute/index.js b/packages/krb5/lib/execute/index.js index 7e58eca8a..2ad0c70ed 100644 --- a/packages/krb5/lib/execute/index.js +++ b/packages/krb5/lib/execute/index.js @@ -1,10 +1,10 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const realm = config.admin.realm ? `-r ${config.admin.realm}` : ''; const {stdout} = await this.execute({ diff --git a/packages/krb5/lib/ktadd/index.js b/packages/krb5/lib/ktadd/index.js index aae466dfe..24f975624 100644 --- a/packages/krb5/lib/ktadd/index.js +++ b/packages/krb5/lib/ktadd/index.js @@ -1,11 +1,11 @@ // Dependencies -const path = require('path'); -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} diff --git a/packages/krb5/lib/ktadd/schema.json b/packages/krb5/lib/ktadd/schema.json index 3938eaefd..6236f1dcf 100644 --- a/packages/krb5/lib/ktadd/schema.json +++ b/packages/krb5/lib/ktadd/schema.json @@ -3,17 +3,17 @@ "type": "object", "properties": { "admin": { - "$ref": "module://@nikitajs/krb5/lib/execute#/definitions/config/properties/admin" + "$ref": "module://@nikitajs/krb5/execute#/definitions/config/properties/admin" }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "keytab": { "type": "string", "description": "Path to the file storing key entries." }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "principal": { "type": "string", @@ -24,7 +24,7 @@ "description": "The realm the principal belongs to." 
}, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/krb5/lib/ktutil/add/index.js b/packages/krb5/lib/ktutil/add/index.js index 84d074cd5..087718764 100644 --- a/packages/krb5/lib/ktutil/add/index.js +++ b/packages/krb5/lib/ktutil/add/index.js @@ -1,17 +1,14 @@ - // Dependencies -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({ - config, - tools: {log} - }) { +export default { + handler: async function ({ config, tools: { log } }) { if (/^\S+@\S+$/.test(config.principal)) { if (config.realm == null) { - config.realm = config.principal.split('@')[1]; + config.realm = config.principal.split("@")[1]; } } else { if (!config.realm) { @@ -23,19 +20,27 @@ module.exports = { let princ_entries = []; let princ = {}; // Get keytab entries - const {$status: entriesExist, stdout: entriesStdout} = await this.execute({ - $shy: true, - command: `echo -e 'rkt ${config.keytab}\nlist -e -t \n' | ktutil`, - code: [0, 1] - }); + const { $status: entriesExist, stdout: entriesStdout, stderr } = await this.execute( + { + $shy: true, + command: dedent` + [ ! -f ${config.keytab} ] && exit 42 + ktutil <<"CMD" + read_kt ${config.keytab} + list -e -t + CMD + `, + code: [0, 42, 1], + } + ); if (entriesExist) { - log({ - message: "Principals exist in Keytab, check kvno validity", - level: 'DEBUG' - }); + log("DEBUG", "Principals exist in Keytab, check kvno validity"); const lines = utils.string.lines(entriesStdout); for (const line of lines) { - const match = /^\s*(\d+)\s*(\d+)\s+([\d\/:]+\s+[\d\/:]+)\s+(.*)\s*\(([\w|-]*)\)\s*$/.exec(line); + const match = + /^\s*(\d+)\s*(\d+)\s+([\d\/:]+\s+[\d\/:]+)\s+(.*)\s*\(([\w|-]*)\)\s*$/.exec( + line + ); if (!match) { continue; } @@ -45,65 +50,74 @@ module.exports = { kvno: parseInt(kvno, 10), timestamps: timestamp, principal: principal.trim(), - enctype: enctype + enctype: enctype, }); } - princ_entries = entries.filter(function(e) { - return `${e.principal}` === `${config.principal}`; - }).reverse(); + princ_entries = entries + .filter((e) => `${e.principal}` === `${config.principal}`) + .reverse(); } // Get principal information and compare to keytab entries kvnos - const {$status: principalExists, stdout: principalStdout} = await this.krb5.execute({ - $shy: true, - admin: config.admin, - command: `getprinc -terse ${config.principal}` - }); + const { $status: principalExists, stdout: principalStdout } = + await this.krb5.execute({ + $shy: true, + admin: config.admin, + command: `getprinc -terse ${config.principal}`, + }); if (principalExists) { let values = utils.string.lines(principalStdout)[1]; if (!values) { // Check if a ticket exists for this throw Error(`Principal does not exist: '${config.principal}'`); } - values = values.split('\t'); + values = values.split("\t"); princ = { mdate: parseInt(values[2], 10) * 1000, - kvno: parseInt(values[8], 10) + kvno: parseInt(values[8], 10), }; } // read keytab and check kvno validities - const removeCommand = config.enctypes.map( (enctype) => { - const filteredPrincEntries = princ_entries.filter( - (entry) => entry.enctype === enctype - ); - const entry = - filteredPrincEntries.length === 1 - ? 
entries.filter((entry) => entry.enctype === enctype)[0] - : null; - // remove entry if kvno not identical - if (entry === null || entry?.kvno === princ.kvno) { - return - } - log({ - message: `Remove from Keytab kvno '${entry.kvno}', principal kvno '${princ.kvno}'`, - level: 'INFO' - }); - return `delete_entry ${entry != null ? entry.slot : void 0}`; - }).filter(Boolean) + const removeCommand = config.enctypes + .map((enctype) => { + const filteredPrincEntries = princ_entries.filter( + (entry) => entry.enctype === enctype + ); + const entry = + filteredPrincEntries.length === 1 + ? entries.filter((entry) => entry.enctype === enctype)[0] + : null; + // remove entry if kvno not identical + if (entry === null || entry?.kvno === princ.kvno) { + return; + } + log( + "INFO", + `Remove from Keytab kvno '${entry.kvno}', principal kvno '${princ.kvno}'` + ); + return `delete_entry ${entry != null ? entry.slot : void 0}`; + }) + .filter(Boolean); const tmp_keytab = `${config.keytab}.tmp_nikita_${Date.now()}`; if (entries.length > princ_entries.length) { await this.execute({ $if: removeCommand.length, - command: `echo -e 'rkt ${config.keytab}\n` + removeCommand.join('\n') + `\nwkt ${tmp_keytab}\nquit\n' | ktutil` + command: dedent` + ktutil <<"EOF" + read_kt ${config.keytab} + ${removeCommand.join("\n")} + write_kt ${tmp_keytab} + EOF + ` }); await this.fs.move({ $if: removeCommand.length, source: tmp_keytab, - target: config.keytab + target: config.keytab, }); } - if ((entries.length === princ_entries.length) && removeCommand.length) { + if (entries.length === princ_entries.length && removeCommand.length) { await this.fs.remove({ - target: config.keytab + target: config.keytab, }); } // write entries in keytab @@ -124,23 +138,28 @@ module.exports = { .filter(Boolean); await this.execute({ $if: createCommand.length, - command: "echo -e '" + createCommand.join('\n') + `\nwkt ${config.keytab}\n' | ktutil` + command: dedent` + ktutil <<"EOF" + ${createCommand.join("\n")} + wkt ${config.keytab} + EOF + ` }); // Keytab ownership and permissions await this.fs.chown({ $if: config.uid != null || config.gid != null, target: config.keytab, uid: config.uid, - gid: config.gid + gid: config.gid, }); await this.fs.chmod({ $if: config.mode, target: config.keytab, - mode: config.mode + mode: config.mode, }); }, metadata: { - global: 'krb5', - definitions: definitions - } + global: "krb5", + definitions: definitions, + }, }; diff --git a/packages/krb5/lib/ktutil/add/schema.json b/packages/krb5/lib/ktutil/add/schema.json index c6b68f616..64c37d6b1 100644 --- a/packages/krb5/lib/ktutil/add/schema.json +++ b/packages/krb5/lib/ktutil/add/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "admin": { - "$ref": "module://@nikitajs/krb5/lib/execute#/definitions/config/properties/admin" + "$ref": "module://@nikitajs/krb5/execute#/definitions/config/properties/admin" }, "enctypes": { "type": "array", @@ -19,14 +19,14 @@ "description": "The enctypes used by krb5_server." }, "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "keytab": { "type": "string", "description": "Path to the file storing key entries." }, "mode": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/mode" + "$ref": "module://@nikitajs/file#/definitions/config/properties/mode" }, "password": { "type": "string", @@ -41,7 +41,7 @@ "description": "The realm the principal belongs to." 
}, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/krb5/lib/register.js b/packages/krb5/lib/register.js index d71122c9d..09489e411 100644 --- a/packages/krb5/lib/register.js +++ b/packages/krb5/lib/register.js @@ -1,26 +1,19 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { krb5: { - addprinc: '@nikitajs/krb5/lib/addprinc', - delprinc: '@nikitajs/krb5/lib/delprinc', - execute: '@nikitajs/krb5/lib/execute', - ktadd: '@nikitajs/krb5/lib/ktadd', - ticket: '@nikitajs/krb5/lib/ticket', + addprinc: '@nikitajs/krb5/addprinc', + delprinc: '@nikitajs/krb5/delprinc', + execute: '@nikitajs/krb5/execute', + ktadd: '@nikitajs/krb5/ktadd', + ticket: '@nikitajs/krb5/ticket', ktutil: { - add: '@nikitajs/krb5/lib/ktutil/add' + add: '@nikitajs/krb5/ktutil/add' } } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/krb5/lib/ticket/index.js b/packages/krb5/lib/ticket/index.js index 40047b5de..73ea7dcb2 100644 --- a/packages/krb5/lib/ticket/index.js +++ b/packages/krb5/lib/ticket/index.js @@ -1,11 +1,11 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('@nikitajs/krb5/lib/utils'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import utils from '@nikitajs/krb5/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { await this.execute({ command: dedent` diff --git a/packages/krb5/lib/ticket/schema.json b/packages/krb5/lib/ticket/schema.json index 01938b080..6042b4672 100644 --- a/packages/krb5/lib/ticket/schema.json +++ b/packages/krb5/lib/ticket/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/gid" }, "principal": { "type": "string", @@ -18,7 +18,7 @@ "description": "Path to the file storing key entries." 
}, "uid": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/file#/definitions/config/properties/uid" } }, "oneOf": [ diff --git a/packages/krb5/lib/utils/index.js b/packages/krb5/lib/utils/index.js index d240f76a2..f1e0b0481 100644 --- a/packages/krb5/lib/utils/index.js +++ b/packages/krb5/lib/utils/index.js @@ -1,7 +1,8 @@ -const utils = require('@nikitajs/core/lib/utils'); +import utils from "@nikitajs/core/utils"; +import krb5 from '@nikitajs/krb5/utils/krb5' -module.exports = { +export default { ...utils, - krb5: require('./krb5') + krb5: krb5 }; diff --git a/packages/krb5/lib/utils/krb5.js b/packages/krb5/lib/utils/krb5.js index ad1158c44..34e1d08cb 100644 --- a/packages/krb5/lib/utils/krb5.js +++ b/packages/krb5/lib/utils/krb5.js @@ -1,26 +1,30 @@ - // Miscellaneous kerberos functions -const krb5 = { - kinit: function(config) { - let command = "kinit"; - if (config.keytab === true) { - " -k"; - } else if (config.keytab && typeof config.keytab === 'string') { - command += ` -kt ${config.keytab}`; - } else if (config.password) { - command = `echo ${config.password} | ${command}`; - } else { - throw Error("Incoherent config: expects one of keytab or password"); - } - command += ` ${config.principal}`; - return command = krb5.su(config, command); - }, - su: function(config, command) { - if (config.uid) { - command = `su - ${config.uid} -c '${command}'`; - } - return command; + +const kinit = function (config) { + let command = "kinit"; + if (config.keytab === true) { + (" -k"); + } else if (config.keytab && typeof config.keytab === "string") { + command += ` -kt ${config.keytab}`; + } else if (config.password) { + command = `echo ${config.password} | ${command}`; + } else { + throw Error("Incoherent config: expects one of keytab or password"); } + command += ` ${config.principal}`; + return (command = su(config, command)); }; -module.exports = krb5; +const su = function (config, command) { + if (config.uid) { + command = `su - ${config.uid} -c '${command}'`; + } + return command; +}; + +export { kinit, su }; + +export default { + kinit: kinit, + su: su, +}; diff --git a/packages/krb5/package.json b/packages/krb5/package.json index e0cd1f0e1..998015337 100644 --- a/packages/krb5/package.json +++ b/packages/krb5/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/krb5", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various Kerberos 5 operations.", "keywords": [ "nikita", @@ -13,7 +14,6 @@ "system", "task" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -56,20 +56,26 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/krb5/lib/register" - ], "inline-diffs": true, - "timeout": 40000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/krb5/register", + "should" + ], + "throw-deprecation": true, + "timeout": 40000 }, "publishConfig": { "access": "public" @@ -85,5 +91,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/krb5/test.sample.coffee 
b/packages/krb5/test.sample.coffee index 7470f1457..fd43fdae3 100644 --- a/packages/krb5/test.sample.coffee +++ b/packages/krb5/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: api: false krb5: false @@ -21,5 +21,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/krb5/test/addprinc.coffee b/packages/krb5/test/addprinc.coffee index accbe1fb0..f1b5dae93 100644 --- a/packages/krb5/test/addprinc.coffee +++ b/packages/krb5/test/addprinc.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, krb5} = require './test' -they = require('mocha-they')(config) - -return unless tags.krb5_addprinc +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'krb5.addprinc', -> + return unless test.tags.krb5_addprinc describe 'schema', -> @@ -25,8 +25,8 @@ describe 'krb5.addprinc', -> it 'one of password or randkey must be provided', -> nikita .krb5.addprinc - admin: krb5 - principal: "nikita@#{krb5.realm}" + admin: test.krb5 + principal: "nikita@#{test.krb5.realm}" .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' message: [ @@ -42,81 +42,81 @@ describe 'krb5.addprinc', -> they 'create a new principal with a randkey', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" randkey: true $status.should.be.true() {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" randkey: true $status.should.be.false() they 'create a new principal with a password', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" - password: 'password1' + principal: "nikita@#{test.krb5.realm}" + password: 'secret_1' + # Change password $status.should.be.true() {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" - password: 'password2' + principal: "nikita@#{test.krb5.realm}" + password: 'secret_2' password_sync: true $status.should.be.true() + # Check status {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" - password: 'password2' + principal: "nikita@#{test.krb5.realm}" + password: 'secret_2' password_sync: true $status.should.be.false() + # Check token + {$status} = await @execute + command: "echo secret_2 | kinit nikita@#{test.krb5.realm}" + $status.should.be.true() they 'dont overwrite password', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'password1' $status.should.be.true() {$status} = await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'password2' password_sync: false # Default $status.should.be.false() await @execute - command: "echo 
password1 | kinit nikita@#{krb5.realm}" + command: "echo password1 | kinit nikita@#{test.krb5.realm}" - they 'call function with new style', ({ssh}) -> - user = - password: 'user123' - password_sync: true - principal: 'user2@NODE.DC1.CONSUL' + they 'with keybab', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 - , -> + krb5: admin: test.krb5 + $tmpdir: true + , ({metadata: {tmpdir}}) -> await @fs.remove - target: '/etc/security/keytabs/user1.service.keytab' + target: "#{tmpdir}/user1.service.keytab" await @krb5.delprinc - principal: user.principal - await @krb5.delprinc - principal: "user1/krb5@NODE.DC1.CONSUL" - await @krb5.addprinc - principal: "user1/krb5@NODE.DC1.CONSUL" + principal: "user1/krb5@#{test.krb5.realm}" + {$status} = await @krb5.addprinc + principal: "user1/krb5@#{test.krb5.realm}" randkey: true - keytab: '/etc/security/keytabs/user1.service.keytab' - {$status} = await @krb5.addprinc user + keytab: "#{tmpdir}/user1.service.keytab" $status.should.be.true() {$status} = await @execute - command: "echo #{user.password} | kinit #{user.principal}" + command: "kinit -kt #{tmpdir}/user1.service.keytab user1/krb5@#{test.krb5.realm}" $status.should.be.true() diff --git a/packages/krb5/test/delprinc.coffee b/packages/krb5/test/delprinc.coffee index 88513092f..fcf1f123e 100644 --- a/packages/krb5/test/delprinc.coffee +++ b/packages/krb5/test/delprinc.coffee @@ -1,31 +1,31 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, krb5} = require './test' -they = require('mocha-they')(config) - -return unless tags.krb5_delprinc +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'krb5.delprinc', -> + return unless test.tags.krb5_delprinc they 'a principal which exists', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" randkey: true {$status} = await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" $status.should.be.true() they 'a principal which does not exist', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" {$status} = await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" $status.should.be.false() diff --git a/packages/krb5/test/execute.coffee b/packages/krb5/test/execute.coffee index 55533f1f4..a08e6db16 100644 --- a/packages/krb5/test/execute.coffee +++ b/packages/krb5/test/execute.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, krb5} = require './test' -they = require('mocha-they')(config) - -return unless tags.krb5 +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'krb5.execute', -> + return unless test.tags.krb5 describe 'schema', -> @@ -26,7 +26,7 @@ describe 'krb5.execute', -> they 'global properties', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> {stdout} = await @krb5.execute command: 'listprincs' @@ -37,7 +37,7 @@ describe 'krb5.execute', -> $ssh: ssh , -> {stdout} = await @krb5.execute - admin: krb5 + admin: test.krb5 command: 'listprincs' stdout.should.containEql 'kadmin/admin' @@ -46,12 +46,12 @@ describe 'krb5.execute', -> $ssh: ssh , -> {$status} = await @krb5.execute - admin: krb5 + 
admin: test.krb5 command: 'listprincs' - grep: krb5.principal + grep: test.krb5.principal $status.should.be.true() {$status} = await @krb5.execute - admin: krb5 + admin: test.krb5 command: 'listprincs' grep: "missing string" $status.should.be.false() @@ -61,12 +61,12 @@ describe 'krb5.execute', -> $ssh: ssh , -> {$status, stdout} = await @krb5.execute - admin: krb5 + admin: test.krb5 command: 'listprincs' grep: /^.*@.*$/ $status.should.be.true() {$status, stdout} = await @krb5.execute - admin: krb5 + admin: test.krb5 command: 'listprincs' grep: /^.*missing.*$/ $status.should.be.false() diff --git a/packages/krb5/test/ktadd.coffee b/packages/krb5/test/ktadd.coffee index bf253beae..e83e295bc 100644 --- a/packages/krb5/test/ktadd.coffee +++ b/packages/krb5/test/ktadd.coffee @@ -1,69 +1,69 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, krb5} = require './test' -they = require('mocha-they')(config) - -return unless tags.krb5_ktadd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'krb5.ktadd', -> + return unless test.tags.krb5_ktadd they 'create a new keytab', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" randkey: true {$status} = await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" $status.should.be.true() {$status} = await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" $status.should.be.false() they 'detect kvno', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" randkey: true await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_2.keytab" {$status} = await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" $status.should.be.true() {$status} = await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" $status.should.be.false() they 'change permission', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" randkey: true await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" mode: 0o0755 {$status} = await @krb5.ktadd - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" mode: 0o0707 $status.should.be.true() diff --git a/packages/krb5/test/ktutil/add.coffee b/packages/krb5/test/ktutil/add.coffee index 6cdfb1e7d..b2d3fb2e1 100644 --- a/packages/krb5/test/ktutil/add.coffee +++ b/packages/krb5/test/ktutil/add.coffee @@ -1,17 +1,17 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, krb5} = require '../test' -they = require('mocha-they')(config) - -return unless tags.krb5_ktadd +import nikita 
from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'krb5.kutil.add', -> + return unless test.tags.krb5_ktadd describe 'schema', -> it 'principal, keyta and password must be provided', -> nikita - krb5: admin: krb5 + krb5: admin: test.krb5 , -> @krb5.ktutil.add {} .should.be.rejectedWith @@ -29,19 +29,19 @@ describe 'krb5.kutil.add', -> they 'create a new keytab', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'nikita123-1' {$status} = await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" password: 'nikita123-1' $status.should.be.true() {$status} = await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" password: 'nikita123-1' $status.should.be.false() @@ -49,27 +49,27 @@ describe 'krb5.kutil.add', -> they 'detect kvno', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'nikita123-1' await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" password: 'nikita123-1' await @krb5.execute command: """ - change_password -pw nikita123-2 nikita@#{krb5.realm} + change_password -pw nikita123-2 nikita@#{test.krb5.realm} """ {$status} = await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" password: 'nikita123-2' $status.should.be.true() {$status} = await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" password: 'nikita123-2' $status.should.be.false() @@ -77,19 +77,19 @@ describe 'krb5.kutil.add', -> they 'change permission', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'nikita123-1' await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" password: 'nikita123-1' mode: 0o0755 {$status} = await @krb5.ktutil.add - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita_1.keytab" password: 'nikita123-1' mode: 0o0707 diff --git a/packages/krb5/test/test.coffee b/packages/krb5/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/krb5/test/test.coffee +++ b/packages/krb5/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await 
exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/krb5/test/ticket.coffee b/packages/krb5/test/ticket.coffee index 2f13c5b55..f0c5da17f 100644 --- a/packages/krb5/test/ticket.coffee +++ b/packages/krb5/test/ticket.coffee @@ -1,17 +1,17 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, krb5} = require './test' -they = require('mocha-they')(config) - -return unless tags.krb5_addprinc +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'krb5.ticket', -> + return unless test.tags.krb5_addprinc describe 'schema', -> it 'password or keytab must be provided', -> nikita - krb5: admin: krb5 + krb5: admin: test.krb5 , -> @krb5.ticket {} .should.be.rejectedWith @@ -29,20 +29,20 @@ describe 'krb5.ticket', -> they 'create a new ticket with password', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 , -> await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'myprecious' await @execute 'kdestroy' {$status} = await @krb5.ticket - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'myprecious' $status.should.be.true() {$status} = await @krb5.ticket - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'myprecious' $status.should.be.false() await @execute @@ -51,22 +51,22 @@ describe 'krb5.ticket', -> they 'create a new ticket with a keytab', ({ssh}) -> nikita $ssh: ssh - krb5: admin: krb5 + krb5: admin: test.krb5 $tmpdir: true , ({metadata: {tmpdir}}) -> await @krb5.delprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" await @krb5.addprinc - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" password: 'myprecious' keytab: "#{tmpdir}/nikita.keytab" await @execute 'kdestroy' {$status} = await @krb5.ticket - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" $status.should.be.true() {$status} = await @krb5.ticket - principal: "nikita@#{krb5.realm}" + principal: "nikita@#{test.krb5.realm}" keytab: "#{tmpdir}/nikita.keytab" $status.should.be.false() await @execute diff --git a/packages/krb5/test/utils/krb5.coffee b/packages/krb5/test/utils/krb5.coffee index f26e34367..89d8d29a8 100644 --- a/packages/krb5/test/utils/krb5.coffee +++ b/packages/krb5/test/utils/krb5.coffee @@ -1,10 +1,9 @@ -krb5 = require '../../lib/utils/krb5' -{tags, config} = require '../test' - -return unless tags.krb5 +import krb5 from '@nikitajs/krb5/utils/krb5' +import test from '../test.coffee' describe 'utils.krb5', -> + return unless test.tags.krb5 describe 'kinit', -> diff --git a/packages/ldap/README.md b/packages/ldap/README.md index 7badd68a1..32f31aa84 100644 --- a/packages/ldap/README.md +++ b/packages/ldap/README.md @@ -2,3 +2,33 @@ # Nikita "ldap" package The "ldap" package provides 
Nikita actions for various OpenLDAP operations. + +## Usage + +```js +import "@nikitajs/ldap/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.ldap.add({ + // Connection + uri: "ldap://openldap:389", + binddn: "cn=admin,dc=domain,dc=com", + passwd: "admin", + // User information + entry: { + dn: "cn=nikita,dc=domain,dc=com", + userPassword: "test", + uid: "nikita", + objectClass: [ "top", "account", "posixAccount", "shadowAccount" ], + shadowLastChange: "15140", + shadowMin: "0", + shadowMax: "99999", + shadowWarning: "7", + loginShell: "/bin/bash", + uidNumber: "9610", + gidNumber: "9610", + homeDirectory: "/home/nikita", + }, +}); +console.info("Entry was modified:", $status); +``` diff --git a/packages/ldap/env/openldap/Dockerfile b/packages/ldap/env/openldap/Dockerfile index 5c3c16a05..5147e1e17 100644 --- a/packages/ldap/env/openldap/Dockerfile +++ b/packages/ldap/env/openldap/Dockerfile @@ -1,38 +1,66 @@ -FROM centos:7.9.2009 -MAINTAINER David Worms +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " -RUN \ - # Install Node dependencies - yum install -y git make && \ - # Install SSH and sudo - yum install -y openssh-server openssh-clients sudo && \ - ssh-keygen -A +# # Install Misc dependencies +# RUN yum install -y zip unzip bzip2 git -## Install OpenLDAP client -RUN yum install -y openldap-clients +# # Install PostgreSQL client +# RUN yum install -y postgresql -RUN yum clean all +# ## Install Mysql client +# RUN yum install -y mysql + +# # Install openssl +# RUN yum install -y openssl + +# # Install docker +# RUN yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo +# RUN yum install -y docker-ce +# RUN curl -L https://github.com/docker/compose/releases/download/1.18.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose +# RUN chmod +x /usr/local/bin/docker-compose + +# # Ruby & Gem +# RUN yum install -y gcc ruby ruby-devel + +# # Cgroup +# RUN yum install -y libcgroup-tools + +RUN apt update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt install -y \ + # System + tzdata \ + # Node.js dependencies + build-essential curl git iputils-ping \ + # SSH server and client + openssh-server \ + # Sudo to start ssh + sudo \ + # Java and Kerberos + ldap-utils && \ + # SSH configuration + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/ldap -# Sudo User -RUN useradd nikita -d /home/nikita && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +# User as sudoer +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -# Install Node.js -# Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 -ENV PATH /home/nikita/n/bin:$PATH +# SSH certificate +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys -RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +# Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +ENV PATH /home/nikita/n/bin:$PATH ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/ldap/env/openldap/entrypoint.sh 
b/packages/ldap/env/openldap/entrypoint.sh index 23d193d51..43fbf9c83 100755 --- a/packages/ldap/env/openldap/entrypoint.sh +++ b/packages/ldap/env/openldap/entrypoint.sh @@ -1,21 +1,21 @@ #!/bin/bash set -e +# Source Node.js +. ~/.bashrc # Start ssh daemon sudo /usr/sbin/sshd +# We have TTY, so probably an interactive container... if test -t 0; then - # We have TTY, so probably an interactive container... + # Some command(s) has been passed to container? Execute them and exit. + # No commands provided? Run bash. if [[ $@ ]]; then - # Transfer arguments to mocha - . ~/.bashrc - npx mocha $@ + node_modules/.bin/mocha $@ else - # Run bash when no argument export PS1='[\u@\h : \w]\$ ' /bin/bash fi +# Detached mode else - # Detached mode - . ~/.bashrc npm run test:local fi diff --git a/packages/ldap/env/openldap/run.sh b/packages/ldap/env/openldap/run.sh index 3eaa8bd58..77ee450d9 100755 --- a/packages/ldap/env/openldap/run.sh +++ b/packages/ldap/env/openldap/run.sh @@ -1,4 +1,4 @@ #!/usr/bin/env bash cd `pwd`/`dirname ${BASH_SOURCE}` -docker compose up --abort-on-container-exit +docker compose up --abort-on-container-exit --attach nodejs diff --git a/packages/ldap/env/openldap/test.coffee b/packages/ldap/env/openldap/test.coffee index 8178de758..31638fe0f 100644 --- a/packages/ldap/env/openldap/test.coffee +++ b/packages/ldap/env/openldap/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: api: true ldap: true diff --git a/packages/ldap/lib/acl/index.js b/packages/ldap/lib/acl/index.js index b7dc0683f..553f1277f 100644 --- a/packages/ldap/lib/acl/index.js +++ b/packages/ldap/lib/acl/index.js @@ -1,33 +1,24 @@ // Dependencies -const {is_object_literal, merge} = require('mixme'); -const utils = require('../utils'); -const definitions = require('./schema.json'); +import {is_object_literal, merge} from 'mixme'; +import utils from "@nikitajs/ldap/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { let $status = false; // Get DN if (!config.dn) { - log({ - message: "Get DN of the database to modify", - level: "DEBUG", - }); + log("DEBUG", "Get DN of the database to modify"); const { dn } = await this.ldap.tools.database(config, { suffix: config.suffix, }); config.dn = dn; - log({ - message: `Database DN is ${dn}`, - level: "INFO", - }); + log("INFO", `Database DN is ${dn}`); } for (const acl of config.acls) { // Get ACLs - log({ - message: "List all ACL of the directory", - level: "DEBUG", - }); + log("DEBUG", "List all ACL of the directory"); const { stdout } = await this.ldap.search(config, { attributes: ["olcAccess"], base: `${config.dn}`, @@ -87,36 +78,21 @@ module.exports = { } } if (is_perfect_match) { - log({ - message: "No modification to apply", - level: "INFO", - }); + log("INFO", "No modification to apply"); continue; } if (not_found_acl.length) { - log({ - message: "Modify access after undefined acl", - level: "INFO", - }); + log("INFO", "Modify access after undefined acl"); for (const access_by of olcAccess.by) { not_found_acl.push(access_by); } olcAccess.by = not_found_acl; } else { - log({ - message: "Modify access after reorder", - level: "INFO", - }); - if (typeof log === "function") { - log("nikita `ldap.acl`: m"); - } + log("INFO", "Modify access after reorder"); olcAccess.by = acl.by; } } else { - log({ - message: "Insert a new access", - level: "INFO", - }); + log("INFO", "Insert a new access"); let index = olcAccesses.length; if 
(acl.first) { // not tested diff --git a/packages/ldap/lib/add/index.js b/packages/ldap/lib/add/index.js index b253455a8..de4c6e2bb 100644 --- a/packages/ldap/lib/add/index.js +++ b/packages/ldap/lib/add/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require("dedent"); -const utils = require("../utils"); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { // Auth related config if (config.uri === true) { @@ -18,7 +18,7 @@ module.exports = { for (const entry of config.entry) { // Check if record already exists // exit code 32 is for "no such object" - const { $status, stdout } = await this.ldap.search(config, { + const { $status } = await this.ldap.search(config, { base: entry.dn, code: [0, 32], scope: "base", @@ -29,7 +29,7 @@ module.exports = { ldif += "\n"; ldif += `dn: ${entry.dn}\n`; ldif += "changetype: add\n"; - [_, k, v] = /^(.*?)=(.+?),.*$/.exec(entry.dn); + const [_, k, v] = /^(.*?)=(.+?),.*$/.exec(entry.dn); ldif += `${k}: ${v}\n`; if (entry[k]) { if (entry[k] !== v) { @@ -59,15 +59,15 @@ module.exports = { "ldapmodify", config.continuous ? "-c" : void 0, config.mesh - ? `-Y ${utils.string.escapeshellarg(config.mesh)}` + ? `-Y ${esa(config.mesh)}` : void 0, config.binddn - ? `-D ${utils.string.escapeshellarg(config.binddn)}` + ? `-D ${esa(config.binddn)}` : void 0, config.passwd - ? `-w ${utils.string.escapeshellarg(config.passwd)}` + ? `-w ${esa(config.passwd)}` : void 0, - config.uri ? `-H ${utils.string.escapeshellarg(config.uri)}` : void 0, + config.uri ? `-H ${esa(config.uri)}` : void 0, ].join(" "), dedent` <<-EOF diff --git a/packages/ldap/lib/delete/index.js b/packages/ldap/lib/delete/index.js index 40e7acbfa..acd16eb3e 100644 --- a/packages/ldap/lib/delete/index.js +++ b/packages/ldap/lib/delete/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Auth related config const binddn = config.binddn ? 
`-D ${config.binddn}` : ''; @@ -15,14 +15,12 @@ module.exports = { // Add related config config.dn = [config.dn]; } - const dn = config.dn.map(function(dn) { - return `'${dn}'`; - }).join(' '); - return (await this.execute({ + const dn = config.dn.map((dn) => `'${dn}'` ).join(' '); + await this.execute({ // Check that the entry exists $if_execute: `ldapsearch ${binddn} ${passwd} ${uri} -b ${dn} -s base`, command: `ldapdelete ${binddn} ${passwd} ${uri} ${dn}` - })); + }); }, metadata: { global: 'ldap', diff --git a/packages/ldap/lib/index/index.js b/packages/ldap/lib/index/index.js index 2760ff84d..c66a05836 100644 --- a/packages/ldap/lib/index/index.js +++ b/packages/ldap/lib/index/index.js @@ -1,31 +1,22 @@ // Dependencies -const utils = require('../utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/ldap/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { const indexes = {}; const add = {}; const modify = {}; if (!config.dn) { - log({ - message: "Get DN of the database to modify", - level: "DEBUG", - }); + log("DEBUG", "Get DN of the database to modify"); ({ dn: config.dn } = await this.ldap.tools.database(config, { suffix: config.suffix, })); - log({ - message: `Discovered database DN is ${config.dn}`, - level: "INFO", - }); + log("INFO", `Discovered database DN is ${config.dn}`); } // List all indexes of the directory - log({ - message: "List all indexes of the directory", - level: "DEBUG", - }); + log("DEBUG", "List all indexes of the directory"); const { stdout } = await this.ldap.search(config, { attributes: ["olcDbIndex"], base: `${config.dn}`, diff --git a/packages/ldap/lib/modify/index.js b/packages/ldap/lib/modify/index.js index 6b5459628..970cccc99 100644 --- a/packages/ldap/lib/modify/index.js +++ b/packages/ldap/lib/modify/index.js @@ -1,11 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('../utils'); -const definitions = require('./schema.json'); -const esa = utils.string.escapeshellarg; +import dedent from "dedent"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Auth related config if (config.uri === true) { @@ -14,7 +13,7 @@ module.exports = { } config.uri = 'ldapi:///'; } - const uri = config.uri ? `-H ${config.uri}` : ''; // URI is obtained from local openldap conf unless provided + // const uri = config.uri ? 
`-H ${config.uri}` : ''; // URI is obtained from local openldap conf unless provided // Add related config let ldif = ''; const originals = []; diff --git a/packages/ldap/lib/modify/schema.json b/packages/ldap/lib/modify/schema.json index 9ce3b3b33..ceca843be 100644 --- a/packages/ldap/lib/modify/schema.json +++ b/packages/ldap/lib/modify/schema.json @@ -2,7 +2,7 @@ "config": { "type": "object", "properties": { - "$ref": "module://@nikitajs/ldap/lib/search#/definitions/config", + "$ref": "module://@nikitajs/ldap/search#/definitions/config", "operations": { "type": "array", "items": { diff --git a/packages/ldap/lib/register.js b/packages/ldap/lib/register.js index b7ce1827f..013a86686 100644 --- a/packages/ldap/lib/register.js +++ b/packages/ldap/lib/register.js @@ -1,33 +1,20 @@ -// Generated by CoffeeScript 2.7.0 -// Registration of `nikita.ldap` actions -var registry; +import registry from "@nikitajs/core/registry"; -registry = require('@nikitajs/core/lib/registry'); - -module.exports = { +const actions = { ldap: { - acl: '@nikitajs/ldap/lib/acl', - add: '@nikitajs/ldap/lib/add', - delete: '@nikitajs/ldap/lib/delete', - index: '@nikitajs/ldap/lib/index', - modify: '@nikitajs/ldap/lib/modify', - schema: '@nikitajs/ldap/lib/schema', - search: '@nikitajs/ldap/lib/search', + acl: "@nikitajs/ldap/acl", + add: "@nikitajs/ldap/add", + delete: "@nikitajs/ldap/delete", + index: "@nikitajs/ldap/index", + modify: "@nikitajs/ldap/modify", + schema: "@nikitajs/ldap/schema", + search: "@nikitajs/ldap/search", tools: { - database: '@nikitajs/ldap/lib/tools/database', - databases: '@nikitajs/ldap/lib/tools/databases' + database: "@nikitajs/ldap/tools/database", + databases: "@nikitajs/ldap/tools/databases", }, - user: '@nikitajs/ldap/lib/user' - } + user: "@nikitajs/ldap/user", + }, }; -(async function() { - var err; - try { - return (await registry.register(module.exports)); - } catch (error) { - err = error; - console.error(err.stack); - return process.exit(1); - } -})(); +await registry.register(actions); diff --git a/packages/ldap/lib/schema/index.js b/packages/ldap/lib/schema/index.js index 2f1877fcc..ae8590b72 100644 --- a/packages/ldap/lib/schema/index.js +++ b/packages/ldap/lib/schema/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, metadata: { tmpdir }, tools: { log } }) { // TODO: use nikita.ldap.search instead // Auth related config @@ -37,33 +37,21 @@ module.exports = { await this.fs.mkdir({ target: ldifTmpDir, }); - log({ - message: "Directory ldif created", - level: "DEBUG", - }); + log("DEBUG", "Directory ldif created"); await this.fs.copy({ source: config.schema, target: schema, }); - log({ - message: "Schema copied", - level: "DEBUG", - }); + log("DEBUG", "Schema copied"); await this.file({ content: `include ${schema}`, target: conf, }); - log({ - message: "Configuration generated", - level: "DEBUG", - }); + log("DEBUG", "Configuration generated"); await this.execute({ command: `slaptest -f ${conf} -F ${ldifTmpDir}`, }); - log({ - message: "Configuration validated", - level: "DEBUG", - }); + log("DEBUG", "Configuration validated"); const { $status } = await this.fs.move({ source: `${ldifTmpDir}/cn=config/cn=schema/cn={0}${config.name}.ldif`, target: `${ldifTmpDir}/cn=config/cn=schema/cn=${config.name}.ldif`, @@ -72,10 +60,7 @@ module.exports = { if (!$status) { throw Error("No generated schema"); } - 
log({ - message: "Configuration renamed", - level: "DEBUG", - }); + log("DEBUG", "Configuration renamed"); await this.file({ target: `${ldifTmpDir}/cn=config/cn=schema/cn=${config.name}.ldif`, write: [ @@ -117,17 +102,11 @@ module.exports = { }, ], }); - log({ - message: "File ldif ready", - level: "DEBUG", - }); + log("DEBUG", "File ldif ready"); await this.execute({ command: `ldapadd ${uri} ${binddn} ${passwd} -f ${ldifTmpDir}/cn=config/cn=schema/cn=${config.name}.ldif`, }); - return log({ - message: `Schema added: ${config.name}`, - level: "INFO", - }); + log("INFO", `Schema added: ${config.name}`); }, metadata: { tmpdir: true, diff --git a/packages/ldap/lib/schema/schema.json b/packages/ldap/lib/schema/schema.json index 888f655ff..d867cdc36 100644 --- a/packages/ldap/lib/schema/schema.json +++ b/packages/ldap/lib/schema/schema.json @@ -2,7 +2,7 @@ "config": { "type": "object", "properties": { - "$ref": "module://@nikitajs/ldap/lib/search#/definitions/config", + "$ref": "module://@nikitajs/ldap/search#/definitions/config", "name": { "type": "string", "description": "Common name of the schema." diff --git a/packages/ldap/lib/search/index.js b/packages/ldap/lib/search/index.js index b2a2c0d01..c67769f73 100644 --- a/packages/ldap/lib/search/index.js +++ b/packages/ldap/lib/search/index.js @@ -1,10 +1,9 @@ // Dependencies -const utils = require('../utils'); -const definitions = require('./schema.json'); -const esa = utils.string.escapeshellarg; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { // TODO: use nikita.ldap.search // Auth related config diff --git a/packages/ldap/lib/tools/database/index.js b/packages/ldap/lib/tools/database/index.js index 5863a43f3..8b3c69ed6 100644 --- a/packages/ldap/lib/tools/database/index.js +++ b/packages/ldap/lib/tools/database/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {stdout} = await this.ldap.search(config, { base: config.base, diff --git a/packages/ldap/lib/tools/database/schema.json b/packages/ldap/lib/tools/database/schema.json index 639103164..96409571d 100644 --- a/packages/ldap/lib/tools/database/schema.json +++ b/packages/ldap/lib/tools/database/schema.json @@ -18,7 +18,7 @@ ] }, { - "$ref": "module://@nikitajs/ldap/lib/search" + "$ref": "module://@nikitajs/ldap/search" } ] } diff --git a/packages/ldap/lib/tools/databases/index.js b/packages/ldap/lib/tools/databases/index.js index 587c8c3a0..1c5bbd4c7 100644 --- a/packages/ldap/lib/tools/databases/index.js +++ b/packages/ldap/lib/tools/databases/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require("../../utils"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/ldap/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { const { stdout } = await this.ldap.search(config, { base: config.base, diff --git a/packages/ldap/lib/tools/databases/schema.json b/packages/ldap/lib/tools/databases/schema.json index 6d050fafb..be6bdfe0d 100644 --- a/packages/ldap/lib/tools/databases/schema.json +++ b/packages/ldap/lib/tools/databases/schema.json @@ -11,7 +11,7 @@ } }, { - "$ref": 
"module://@nikitajs/ldap/lib/search" + "$ref": "module://@nikitajs/ldap/search" } ] } diff --git a/packages/ldap/lib/user/index.js b/packages/ldap/lib/user/index.js index 1a1f1c1df..6b1e38dbd 100644 --- a/packages/ldap/lib/user/index.js +++ b/packages/ldap/lib/user/index.js @@ -1,10 +1,10 @@ // Dependencies -const {merge} = require('mixme'); -const utils = require('../utils'); -const definitions = require('./schema.json'); +import {merge} from 'mixme'; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (!Array.isArray(config.user)) { // User related config @@ -29,17 +29,9 @@ module.exports = { passwd: config.passwd, }); if (added) { - log({ - message: "User added", - level: "WARN", - module: "nikita/ldap/user", - }); + log("WARN", "User added"); } else if (updated) { - log({ - message: "User updated", - level: "WARN", - module: "nikita/ldap/user", - }); + log("WARN", "User updated"); } if (updated || added) { modified = true; @@ -66,25 +58,22 @@ module.exports = { command: [ "ldappasswd", config.mesh - ? `-Y ${utils.string.escapeshellarg(config.mesh)}` + ? `-Y ${esa(config.mesh)}` : void 0, config.binddn - ? `-D ${utils.string.escapeshellarg(config.binddn)}` + ? `-D ${esa(config.binddn)}` : void 0, config.passwd - ? `-w ${utils.string.escapeshellarg(config.passwd)}` + ? `-w ${esa(config.passwd)}` : void 0, config.uri - ? `-H ${utils.string.escapeshellarg(config.uri)}` + ? `-H ${esa(config.uri)}` : void 0, `-s ${user.userPassword}`, - `${utils.string.escapeshellarg(user.dn)}`, + `${esa(user.dn)}`, ].join(" "), }); - log({ - message: "Password modified", - level: "WARN", - }); + log("WARN", "Password modified"); modified = true; } } diff --git a/packages/ldap/lib/utils/index.js b/packages/ldap/lib/utils/index.js index 288d42ba3..ed6c5389a 100644 --- a/packages/ldap/lib/utils/index.js +++ b/packages/ldap/lib/utils/index.js @@ -1,8 +1,8 @@ -const utils = require('@nikitajs/core/lib/utils'); -const ldap = require('./ldap'); +import utils from "@nikitajs/core/utils"; +import ldap from '@nikitajs/ldap/utils/ldap'; -module.exports = { +export default { ...utils, ldap: ldap }; diff --git a/packages/ldap/lib/utils/ldap.js b/packages/ldap/lib/utils/ldap.js index c1e2a724e..07a599314 100644 --- a/packages/ldap/lib/utils/ldap.js +++ b/packages/ldap/lib/utils/ldap.js @@ -1,5 +1,5 @@ -module.exports = { +export default { acl: { /* ## Parse ACLs diff --git a/packages/ldap/package.json b/packages/ldap/package.json index 67edbd7a8..fbb1f8c57 100644 --- a/packages/ldap/package.json +++ b/packages/ldap/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/ldap", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various OpenLDAP operations.", "keywords": [ "nikita", @@ -11,7 +12,6 @@ "system", "task" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -47,20 +47,26 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/ldap/lib/register" - ], "inline-diffs": true, - "timeout": 20000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", 
- "recursive": true + "require": [ + "@nikitajs/ldap/register", + "should" + ], + "throw-deprecation": true, + "timeout": 20000 }, "publishConfig": { "access": "public" @@ -79,5 +85,6 @@ }, "dependencies": { "mixme": "^1.0.0" - } + }, + "type": "module" } diff --git a/packages/ldap/test.sample.coffee b/packages/ldap/test.sample.coffee index 201a7bb56..9f62b7abd 100644 --- a/packages/ldap/test.sample.coffee +++ b/packages/ldap/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: api: true ldap: false @@ -24,5 +24,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/ldap/test/acl.coffee b/packages/ldap/test/acl.coffee index 6382bafeb..81a70e15a 100644 --- a/packages/ldap/test/acl.coffee +++ b/packages/ldap/test/acl.coffee @@ -1,24 +1,24 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) -utils = require '../lib/utils' - -return unless tags.ldap_acl +import nikita from '@nikitajs/core' +import utils from '@nikitajs/ldap/utils' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.acl', -> + return unless test.tags.ldap_acl client = olcAccesses = olcDatabase = null beforeEach -> {database: olcDatabase} = await nikita.ldap.tools.database - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd - suffix: ldap.suffix_dn + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd + suffix: test.ldap.suffix_dn {stdout} = await nikita.ldap.search - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd base: "olcDatabase=#{olcDatabase},cn=config" attributes: ['olcAccess'] scope: 'base' @@ -27,9 +27,9 @@ describe 'ldap.acl', -> .map (line) -> line.split(':')[1].trim() afterEach -> await nikita.ldap.modify - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd operations: dn: "olcDatabase=#{olcDatabase},cn=config" changetype: 'modify' @@ -46,13 +46,13 @@ describe 'ldap.acl', -> they 'create a new permission', ({ssh}) -> nikita ldap: - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd $ssh: ssh , -> {$status} = await @ldap.acl - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn acls: to: 'dn.base="dc=test,dc=com"' by: [ @@ -60,7 +60,7 @@ describe 'ldap.acl', -> ] $status.should.be.true() {$status} = await @ldap.acl - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn acls: to: 'dn.base="dc=test,dc=com"' by: [ @@ -71,27 +71,27 @@ describe 'ldap.acl', -> they 'respect order in creation', ({ssh}) -> nikita ldap: - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd $ssh: ssh , -> @ldap.acl - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn acls: to: 'dn.base="ou=test1,dc=test,dc=com"' by: [ 'dn.base="gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" read' ] @ldap.acl - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn acls: to: 
'dn.base="ou=test2,dc=test,dc=com"' by: [ 'dn.base="gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" read' ] @ldap.acl - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn acls: to: 'dn.base="ou=INSERTED,dc=test,dc=com"' place_before: 'dn.base="ou=test2,dc=test,dc=com"' @@ -99,7 +99,7 @@ describe 'ldap.acl', -> 'dn.base="gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" read' ] {dn} = await @ldap.tools.database - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn {stdout} = await @ldap.search base: dn scope: 'base' diff --git a/packages/ldap/test/add.coffee b/packages/ldap/test/add.coffee index a25e0d4aa..3072705bc 100644 --- a/packages/ldap/test/add.coffee +++ b/packages/ldap/test/add.coffee @@ -1,14 +1,14 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) - -return unless tags.ldap +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.add', -> + return unless test.tags.ldap user = - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" userPassword: 'test' uid: 'nikita' objectClass: [ 'top', 'account', 'posixAccount', 'shadowAccount' ] @@ -24,35 +24,35 @@ describe 'ldap.add', -> they 'add new entry', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" {$status} = await @ldap.add entry: user $status.should.be.true() @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" they 'add existing entry', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" {$status} = await @ldap.add entry: user - exclude: ['userPassword'] + # exclude: ['userPassword'] {$status} = await @ldap.add entry: user - exclude: ['userPassword'] + # exclude: ['userPassword'] $status.should.be.false() @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" diff --git a/packages/ldap/test/index.coffee b/packages/ldap/test/index.coffee index c353a5f6a..1aea2473f 100644 --- a/packages/ldap/test/index.coffee +++ b/packages/ldap/test/index.coffee @@ -1,24 +1,24 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) -utils = require '../lib/utils' - -return unless tags.ldap_index +import nikita from '@nikitajs/core' +import utils from '@nikitajs/ldap/utils' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.index', -> + return unless test.tags.ldap_index olcDatabase = olcDbIndexes = null beforeEach -> {database: olcDatabase} = await nikita.ldap.tools.database - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd - suffix: ldap.suffix_dn + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd + suffix: test.ldap.suffix_dn {stdout} = await nikita.ldap.search - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd base: "olcDatabase=#{olcDatabase},cn=config" 
attributes:['olcDbIndex'] scope: 'base' @@ -27,9 +27,9 @@ describe 'ldap.index', -> .map (line) -> line.split(':')[1].trim() afterEach -> nikita.ldap.modify - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd operations: dn: "olcDatabase=#{olcDatabase},cn=config" changetype: 'modify' @@ -46,13 +46,13 @@ describe 'ldap.index', -> they 'create a new index from dn', ({ssh}) -> nikita ldap: - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd $ssh: ssh , -> {dn} = await @ldap.tools.database - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn {$status} = await @ldap.index dn: dn indexes: @@ -67,18 +67,18 @@ describe 'ldap.index', -> they 'create a new index from suffix', ({ssh}) -> nikita ldap: - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd $ssh: ssh , -> {$status} = await @ldap.index - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn indexes: aliasedEntryName: 'eq' $status.should.be.true() {$status} = await @ldap.index - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn indexes: aliasedEntryName: 'eq' $status.should.be.false() @@ -86,24 +86,24 @@ describe 'ldap.index', -> they 'update an existing index', ({ssh}) -> nikita ldap: - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd $ssh: ssh , -> # Set initial value await @ldap.index - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn indexes: aliasedEntryName: 'eq' # Apply the update {$status} = await @ldap.index - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn indexes: aliasedEntryName: 'pres,eq' $status.should.be.true() {$status} = await @ldap.index - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn indexes: aliasedEntryName: 'pres,eq' $status.should.be.false() diff --git a/packages/ldap/test/modify.coffee b/packages/ldap/test/modify.coffee index cce89f177..fa3a3b136 100644 --- a/packages/ldap/test/modify.coffee +++ b/packages/ldap/test/modify.coffee @@ -1,22 +1,22 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) - -return unless tags.ldap_user +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.modify', -> + return unless test.tags.ldap_user they 'entry with password', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> entry = - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" userPassword: 'secret' uid: 'nikita' objectClass: [ 'top', 'account', 'posixAccount' ] @@ -48,13 +48,13 @@ describe 'ldap.modify', -> they 'entry with array', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> entry = - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" objectClass: [ 'top', 'posixGroup' ] cn: 'nikita' gidNumber: '3000' diff --git a/packages/ldap/test/schema.coffee b/packages/ldap/test/schema.coffee index 5b9ebe1cf..069d552e8 
100644 --- a/packages/ldap/test/schema.coffee +++ b/packages/ldap/test/schema.coffee @@ -1,22 +1,22 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) - -return unless tags.ldap +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.schema', -> + return unless test.tags.ldap they 'entry with password', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> entry = - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" userPassword: 'secret' uid: 'nikita' objectClass: [ 'top', 'account', 'posixAccount' ] @@ -32,7 +32,7 @@ describe 'ldap.schema', -> value: 'newsecret' ] ] - @ldap.delete + await @ldap.delete dn: entry.dn {$status} = await @ldap.add entry: entry @@ -42,6 +42,6 @@ describe 'ldap.schema', -> {$status} = await @ldap.modify operations: operations $status.should.be.false() - @ldap.delete + await @ldap.delete dn: entry.dn \ No newline at end of file diff --git a/packages/ldap/test/search.coffee b/packages/ldap/test/search.coffee index 4a3887ebb..5c5d0d33e 100644 --- a/packages/ldap/test/search.coffee +++ b/packages/ldap/test/search.coffee @@ -1,20 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) - -return unless tags.ldap +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.search', -> + return unless test.tags.ldap they 'with scope base', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> {stdout} = await @ldap.search - base: "#{ldap.suffix_dn}" + base: "#{test.ldap.suffix_dn}" stdout.should.containEql 'dn: dc=example,dc=org' diff --git a/packages/ldap/test/test.coffee b/packages/ldap/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/ldap/test/test.coffee +++ b/packages/ldap/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/ldap/test/tools/database.coffee b/packages/ldap/test/tools/database.coffee index 215a83886..31fc51eb2 100644 --- a/packages/ldap/test/tools/database.coffee 
+++ b/packages/ldap/test/tools/database.coffee @@ -1,17 +1,17 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ldap +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.database', -> + return unless test.tags.ldap describe 'schema', -> it 'require `suffix`', -> nikita.ldap.tools.database - uri: ldap.uri + uri: test.ldap.uri .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' message: /config must have required property 'suffix'/ @@ -19,7 +19,7 @@ describe 'ldap.database', -> it 'extends ldap.search', -> nikita.ldap.tools.database uri: invalid: 'value' - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' message: /config\/uri must be string/ @@ -27,7 +27,7 @@ describe 'ldap.database', -> it 'provide an immutable value to `base`', -> nikita.ldap.tools.database base: 'invalid' - suffix: ldap.suffix_dn + suffix: test.ldap.suffix_dn .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' message: /config\/base must be equal to constant, allowedValue is "cn=config"/ @@ -40,9 +40,9 @@ describe 'ldap.database', -> $ssh: ssh , -> {dn, database} = await @ldap.tools.database - suffix: ldap.suffix_dn - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + suffix: test.ldap.suffix_dn + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd dn.should.match /^olcDatabase=\{\d+\}\w+,cn=config$/ database.should.match /^\{\d+\}\w+$/ diff --git a/packages/ldap/test/tools/databases.coffee b/packages/ldap/test/tools/databases.coffee index b79f09e47..ecd344f86 100644 --- a/packages/ldap/test/tools/databases.coffee +++ b/packages/ldap/test/tools/databases.coffee @@ -1,20 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require '../test' -they = require('mocha-they')(config) - -return unless tags.ldap +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.databases', -> + return unless test.tags.ldap they 'create a new index', ({ssh}) -> nikita $ssh: ssh , -> {databases} = await @ldap.tools.databases - suffix: ldap.suffix_dn - uri: ldap.uri - binddn: ldap.config.binddn - passwd: ldap.config.passwd + suffix: test.ldap.suffix_dn + uri: test.ldap.uri + binddn: test.ldap.config.binddn + passwd: test.ldap.config.passwd for database in databases database.should.match /^\{-?\d+\}\w+$/ diff --git a/packages/ldap/test/user.coffee b/packages/ldap/test/user.coffee index b8fc9c5e2..9478ad31c 100644 --- a/packages/ldap/test/user.coffee +++ b/packages/ldap/test/user.coffee @@ -1,25 +1,25 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ldap} = require './test' -they = require('mocha-they')(config) - -return unless tags.ldap_user +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'ldap.user', -> + return unless test.tags.ldap_user they 'create a new user', ({ssh}) -> nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" {$status} = await @ldap.user user: - dn: 
"cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" userPassword: 'test' uid: 'nikita' objectClass: [ 'top', 'account', 'posixAccount', 'shadowAccount' ] @@ -33,11 +33,11 @@ describe 'ldap.user', -> homeDirectory: '/home/nikita' $status.should.be.true() @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" they 'detect no change', ({ssh}) -> user = - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" userPassword: 'test' uid: 'nikita' objectClass: [ 'top', 'account', 'posixAccount', 'shadowAccount' ] @@ -46,17 +46,17 @@ describe 'ldap.user', -> homeDirectory: '/home/nikita' nikita ldap: - binddn: ldap.binddn - passwd: ldap.passwd - uri: ldap.uri + binddn: test.ldap.binddn + passwd: test.ldap.passwd + uri: test.ldap.uri $ssh: ssh , -> @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" @ldap.user user: user {$status} = await @ldap.user user: user $status.should.be.false() @ldap.delete - dn: "cn=nikita,#{ldap.suffix_dn}" + dn: "cn=nikita,#{test.ldap.suffix_dn}" diff --git a/packages/ldap/test/utils/ldap.coffee b/packages/ldap/test/utils/ldap.coffee index d8ce30c0f..8653b371f 100644 --- a/packages/ldap/test/utils/ldap.coffee +++ b/packages/ldap/test/utils/ldap.coffee @@ -1,10 +1,9 @@ -ldap = require '../../lib/utils/ldap' -{tags} = require '../test' - -return unless tags.api +import ldap from '@nikitajs/ldap/utils/ldap' +import test from '../test.coffee' describe 'utils.ldap acl', -> + return unless test.tags.api it 'parse', -> ldap.acl diff --git a/packages/log/README.md b/packages/log/README.md index f541f5d30..19f62078d 100644 --- a/packages/log/README.md +++ b/packages/log/README.md @@ -2,3 +2,20 @@ # Nikita "log" package The "log" package provides Nikita actions for logging. + +## Usage + +```js +import "@nikitajs/log/register"; +import nikita from "@nikitajs/core"; + +await nikita + .log.cli() + .log.csv({ + basedir: "~/.log", + }) + .execute({ + $header: "Whoami", + command: "whoami", + }); +``` diff --git a/packages/log/lib/cli/index.js b/packages/log/lib/cli/index.js index 40290c960..5152b18da 100644 --- a/packages/log/lib/cli/index.js +++ b/packages/log/lib/cli/index.js @@ -1,9 +1,9 @@ // Dependencies -const colors = require('colors/safe'); -const {merge} = require('mixme'); -const pad = require('pad'); -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import colors from 'colors/safe.js'; +import {merge} from 'mixme'; +import pad from 'pad'; +import utils from '@nikitajs/core/utils'; +import definitions from "./schema.json" assert { type: "json" }; // Utils const format_line = function ({ host, header, status, time }, config) { @@ -28,7 +28,7 @@ const format_line = function ({ host, header, status, time }, config) { }; // Action -module.exports = { +export default { ssh: false, handler: function ({ config }) { // Normalize diff --git a/packages/log/lib/cli/schema.json b/packages/log/lib/cli/schema.json index 6b24689fd..876706953 100644 --- a/packages/log/lib/cli/schema.json +++ b/packages/log/lib/cli/schema.json @@ -46,7 +46,7 @@ "description": "Activate or desactivate logging." }, "end": { - "$ref": "module://@nikitajs/log/lib/stream#/definitions/config/properties/end", + "$ref": "module://@nikitajs/log/stream#/definitions/config/properties/end", "default": false, "description": "Close the stream when the Nikita session terminates. 
The default\nis to not close the stream for this action, in opposite to the default\n`log.stream` action, because the default stream is `process.stderr`\nwhich is expected to remain open." }, @@ -105,12 +105,12 @@ "description": "Separator between columns. A string value apply the same separator\nwhile it is also possible to target a specific sperator per column\nby setting an object." }, "serializer": { - "$ref": "module://@nikitajs/log/lib/stream#/definitions/config/properties/serializer", + "$ref": "module://@nikitajs/log/stream#/definitions/config/properties/serializer", "default": {}, "description": "Internal property, expose access to the serializer object passed\nto the `log.stream` action." }, "stream": { - "$ref": "module://@nikitajs/log/lib/stream#/definitions/config/properties/stream", + "$ref": "module://@nikitajs/log/stream#/definitions/config/properties/stream", "description": "The writable stream where to print the logs, default to\n`process.stderr`." } } diff --git a/packages/log/lib/csv/index.js b/packages/log/lib/csv/index.js index cd02bf372..d180d0a63 100644 --- a/packages/log/lib/csv/index.js +++ b/packages/log/lib/csv/index.js @@ -1,9 +1,9 @@ // Dependencies -const {merge} = require('mixme'); -const definitions = require('./schema.json'); +import {merge} from 'mixme'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: function({config}) { const serializer = { 'nikita:action:start': function({action}) { diff --git a/packages/log/lib/csv/schema.json b/packages/log/lib/csv/schema.json index 2dfea6f2c..f02fc1dcd 100644 --- a/packages/log/lib/csv/schema.json +++ b/packages/log/lib/csv/schema.json @@ -12,7 +12,7 @@ } }, { - "$ref": "module://@nikitajs/log/lib/fs#/definitions/config" + "$ref": "module://@nikitajs/log/fs#/definitions/config" } ] } diff --git a/packages/log/lib/fs/index.js b/packages/log/lib/fs/index.js index 275e25f21..4622b687d 100644 --- a/packages/log/lib/fs/index.js +++ b/packages/log/lib/fs/index.js @@ -1,10 +1,10 @@ // Dependencies -const fs = require('fs'); -const path = require('path'); -const definitions = require('./schema.json'); +import fs from 'node:fs' +import path from 'node:path' +import definitions from "./schema.json" assert { type: "json" }; -// ## Exports -module.exports = { +// Action +export default { handler: async function({config}) { // Normalization let logdir = path.dirname(config.filename); @@ -36,24 +36,21 @@ module.exports = { } await this.log.stream(config); // Handle link to latest directory - return (await this.fs.base.symlink({ + await this.fs.base.symlink({ $if: latestdir, source: logdir, target: latestdir - })); + }); }, hooks: { on_action: { - before: ['@nikitajs/core/lib/plugins/metadata/schema'], - after: ['@nikitajs/core/lib/plugins/ssh'], + before: ['@nikitajs/core/plugins/metadata/schema'], + after: ['@nikitajs/core/plugins/ssh'], handler: function({config, ssh}) { - var ref; // With ssh, filename contain the host or ip address - if (config.filename == null) { - config.filename = `${ssh?.config?.host || 'local'}.log`; - } + config.filename ??= `${ssh?.config?.host || 'local'}.log`; // Log is always local - return config.ssh = false; + config.ssh = false; } } }, diff --git a/packages/log/lib/md/index.js b/packages/log/lib/md/index.js index f3b104c6e..4bb9b52be 100644 --- a/packages/log/lib/md/index.js +++ b/packages/log/lib/md/index.js @@ -1,10 +1,10 @@ // Dependencies -const {merge} = require('mixme'); -const definitions = require('./schema.json'); 
+import {merge} from 'mixme'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const state = {}; const serializer = { @@ -93,7 +93,7 @@ module.exports = { 'text': function(log) { const out = []; out.push(`\n${log.message}`); - if (log.module && log.module !== '@nikitajs/core/lib/actions/call') { + if (log.module && log.module !== '@nikitajs/core/actions/call') { out.push(` (${log.depth}.${log.level}, written by ${log.module})`); } out.push("\n"); diff --git a/packages/log/lib/md/schema.json b/packages/log/lib/md/schema.json index ed123c914..3ca618c17 100644 --- a/packages/log/lib/md/schema.json +++ b/packages/log/lib/md/schema.json @@ -22,7 +22,7 @@ } }, { - "$ref": "module://@nikitajs/log/lib/fs#/definitions/config" + "$ref": "module://@nikitajs/log/fs#/definitions/config" } ] } diff --git a/packages/log/lib/register.js b/packages/log/lib/register.js index bc4749efb..87415069f 100644 --- a/packages/log/lib/register.js +++ b/packages/log/lib/register.js @@ -1,23 +1,15 @@ - // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions = { log: { - cli: '@nikitajs/log/lib/cli', - csv: '@nikitajs/log/lib/csv', - fs: '@nikitajs/log/lib/fs', - md: '@nikitajs/log/lib/md', - stream: '@nikitajs/log/lib/stream' - } + cli: "@nikitajs/log/cli", + csv: "@nikitajs/log/csv", + fs: "@nikitajs/log/fs", + md: "@nikitajs/log/md", + stream: "@nikitajs/log/stream", + }, }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions); diff --git a/packages/log/lib/stream/index.js b/packages/log/lib/stream/index.js index 1cf436d22..8281080cf 100644 --- a/packages/log/lib/stream/index.js +++ b/packages/log/lib/stream/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { ssh: false, handler: function ({ config, tools: { events } }) { // Events diff --git a/packages/log/package.json b/packages/log/package.json index c3cd103a7..26e72261f 100644 --- a/packages/log/package.json +++ b/packages/log/package.json @@ -71,6 +71,10 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], @@ -81,16 +85,16 @@ "license": "MIT", "main": "./lib/index", "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/log/lib/register" - ], "inline-diffs": true, - "timeout": 10000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/log/register", + "should" + ], + "throw-deprecation": true, + "timeout": 3000 }, "repository": { "type": "git", @@ -100,5 +104,6 @@ "scripts": { "test": "npm run test:local", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/log/test.sample.coffee b/packages/log/test.sample.coffee index 8efed6669..c43cc3459 100644 --- a/packages/log/test.sample.coffee +++ b/packages/log/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: posix: true config: [ @@ -12,5 +12,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', 
port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/log/test/cli.coffee b/packages/log/test/cli.coffee index 0556ee0f0..8807b1da7 100644 --- a/packages/log/test/cli.coffee +++ b/packages/log/test/cli.coffee @@ -1,12 +1,10 @@ -fs = require 'fs' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) +import { Writable } from 'node:stream' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.posix - -Writable = require('stream').Writable class MyWritable extends Writable constructor: (data) -> super() @@ -16,6 +14,7 @@ class MyWritable extends Writable callback() describe 'log.cli', -> + return unless test.tags.posix describe 'schema', -> diff --git a/packages/log/test/csv.coffee b/packages/log/test/csv.coffee index 74ca5cf12..6be3c58e6 100644 --- a/packages/log/test/csv.coffee +++ b/packages/log/test/csv.coffee @@ -1,12 +1,11 @@ -fs = require 'fs' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'log.csv', -> + return unless test.tags.posix they 'write message', ({ssh}) -> nikita diff --git a/packages/log/test/fs.coffee b/packages/log/test/fs.coffee index 4be2b2ce1..d93227ca3 100644 --- a/packages/log/test/fs.coffee +++ b/packages/log/test/fs.coffee @@ -1,12 +1,12 @@ -utils = require '@nikitajs/core/lib/utils' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'log.fs', -> + return unless test.tags.posix they 'requires option "serializer"', ({ssh}) -> nikita @@ -23,6 +23,7 @@ describe 'log.fs', -> ].join ' ' they 'serializer can be empty', ({ssh}) -> + return unless ssh nikita $ssh: ssh $tmpdir: true @@ -55,7 +56,6 @@ describe 'log.fs', -> nikita $ssh: ssh $tmpdir: true - $dirty: true , ({metadata: {tmpdir}}) -> await @log.fs basedir: tmpdir @@ -90,7 +90,6 @@ describe 'log.fs', -> nikita $ssh: ssh $tmpdir: true - # $dirty: true , ({metadata: {tmpdir}})-> await @log.fs basedir: tmpdir diff --git a/packages/log/test/md.coffee b/packages/log/test/md.coffee index d3eaf24a7..9d629ae53 100644 --- a/packages/log/test/md.coffee +++ b/packages/log/test/md.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'log.md', -> + return unless test.tags.posix they 'write entering message', ({ssh}) -> nikita @@ -250,7 +250,7 @@ describe 'log.md', -> target: "#{tmpdir}/#{ssh?.host or 'local'}.log" encoding: 'ascii' .should.be.resolvedWith - data: "\nEntering @nikitajs/core/lib/actions/call (1.3)\n" + data: "\nEntering @nikitajs/core/actions/call (1.3)\n" they 'filtered out for bastards action', ({ssh}) -> nikita @@ -266,4 +266,4 @@ 
describe 'log.md', -> target: "#{tmpdir}/#{ssh?.host or 'local'}.log" encoding: 'ascii' .should.be.resolvedWith - data: "\nEntering @nikitajs/core/lib/actions/call (1.2)\n" + data: "\nEntering @nikitajs/core/actions/call (1.2)\n" diff --git a/packages/log/test/test.coffee b/packages/log/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/log/test/test.coffee +++ b/packages/log/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/lxd/README.md b/packages/lxd/README.md index 155f57183..bd723ca2d 100644 --- a/packages/lxd/README.md +++ b/packages/lxd/README.md @@ -13,60 +13,37 @@ The tests require a local LXD client. On a Linux hosts, you can follow the [inst npm test ``` -## Notes - -### Networks - -The LXD tests create two bridge networks: - -* Nikita LXD public: `nktlxdpub`, `192.0.2.1/30` (reserved IP subnet ssigned as TEST-NET-1) -* Nikita LXD private: `nktlxdprv`, `192.0.2.5/30` (reserved IP subnet ssigned as TEST-NET-1) +## Usage + +```js +import "@nikitajs/lxd/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.lxc.init({ + image: "images:alpine/latest", + container: "nikita-list-vm1", + vm: true, +}); +console.info("Machine was created:", $status); +``` -To avoid collision, other tests must create and use their own bridge. +## Notes ### Windows and MacOS users -LXD is only available on Linux. To work around this limitation, we run LXD in a virtual machine which is managed by [Vagrant](https://www.vagrantup.com/) and [VirtualBox](https://www.virtualbox.org/). - -The Nikita project folder is mounted in `/nikita` inside the VM. The LXD tests don't need to know about it because they only interact with the local `lxc` command. For the tests who need to know this path, the location of the Nikita folder inside the VM can be defined with `export NIKITA_HOME=/nikita`. For example, the FreeIPA tests in 'packages/ipa/env/ipa' use it. - -The procedure is abstracted inside the `./bin/cluster start` command. Below are the manual commands to make it work if you wish to do it yourself. +LXD is only available on Linux. To work around this limitation, we run LXD in a virtual machine. -Install: +We provide a script to run LXD inside Multipass which also runs on MacOS ARM architecture: ```bash -# Initialize the VM with multipass -# (compatible with macos silicon) -cd assets && ./multipass.sh && cd .. -# Initialize the VM with Vagrant -# cd assets && vagrant up && cd ..
-Set up LXD client -lxc remote add nikita 127.0.0.1:8443 -lxc remote switch nikita -# Initialize the container -npx coffee start.coffee +./assets/multipass.sh ``` -Update the VM: - -```bash -lxc remote switch local -lxc remote remove nikita -lxc remote add nikita --accept-certificate --password secret 127.0.0.1:8443 -lxc remote switch nikita -``` - -When using a host VM, the test `test/goodies/prlimit.coffee` will fail because -it is expected to run on the LXC host machine and the not machine where Nikita -is executed. The property `tags.lxd_prlimit` must be `false` in `test.coffee`. - -### Permission denied on tmp +### Networks -[FreeIPA install issue](https://bugzilla.redhat.com/show_bug.cgi?id=1678793) +The LXD tests create two bridge networks: -``` -[1/29]: configuring certificate server instance -[error] IOError: [Errno 13] Permission denied: '/tmp/tmp_Tm1l_' -``` +* Nikita LXD public: `nktlxdpub`, `192.0.2.1/30` (reserved IP subnet assigned as TEST-NET-1) +* Nikita LXD private: `nktlxdprv`, `192.0.2.5/30` (reserved IP subnet assigned as TEST-NET-1) -Host must have `fs.protected_regular` set to `0`, eg `echo '0' > /proc/sys/fs/protected_regular && sysctl -p && sysctl -a`. In our Physical -> VM -> LXD setup, the parameters shall be set in the VM, no restart is required to install the FreeIPA server, just uninstall it first with `ipa-server-install --uninstall` before re-executing the install command. +To avoid collision, other tests must create and use their own bridge. diff --git a/packages/lxd/assets/multipass.sh b/packages/lxd/assets/multipass.sh index 3215c5313..5bb8b7098 100755 --- a/packages/lxd/assets/multipass.sh +++ b/packages/lxd/assets/multipass.sh @@ -3,12 +3,6 @@ set -e # Logs: `ll /Library/Logs/Multipass` -# MacOS crash and image cant start: -# see https://github.com/canonical/multipass/issues/1924 -# Restart multipass: -# sudo pkill multipassd -# multipass start nikita - brew list | grep -x "bash-completion" || brew install bash-completion brew list | grep -x "multipass" || brew install --cask multipass @@ -34,6 +28,22 @@ multipass launch \ # Solution: # Re-run `multipass mount $HOME nikita` +# Multipass does not respond +# Solution: +# restart multipass +# sudo launchctl stop com.canonical.multipassd +# sudo launchctl start com.canonical.multipassd + +# MacOS crash and image can't start: +# see https://github.com/canonical/multipass/issues/1924 +# Force kill multipass, it shall restart: +# sudo pkill multipassd +# multipass start nikita + +# Fix DNS +multipass exec nikita -- sudo bash -c "echo 'DNS=8.8.8.8' >> /etc/systemd/resolved.conf" +multipass exec nikita -- sudo systemctl restart systemd-resolved + # LXD installation multipass exec nikita -- sudo apt update multipass exec nikita -- sudo apt upgrade -y @@ -45,7 +55,7 @@ multipass exec nikita -- sudo truncate -s 100G /opt/zfs.img multipass exec nikita -- sudo zpool create lxd /opt/zfs.img multipass exec nikita -- lxd init --auto --storage-backend=zfs --storage-pool=lxd -# LXD configurqtion +# LXD configuration multipass exec nikita -- lxc config set core.https_address '[::]:8443' multipass exec nikita -- lxc config set core.trust_password "secret" multipass exec nikita -- lxc config set images.remote_cache_expiry 30 diff --git a/packages/lxd/lib/cluster/cli/index.js b/packages/lxd/lib/cluster/cli/index.js index 10a339993..c4cc4ab37 100644 --- a/packages/lxd/lib/cluster/cli/index.js +++ b/packages/lxd/lib/cluster/cli/index.js @@ -1,39 +1,41 @@ // Dependencies -const shell = require('shell'); +import shell from
"shell"; +import start from "./start.js"; +import stop from "./stop.js"; shell({ - name: 'lxdvmhost', + name: "lxdvmhost", description: "LXD VM host based on Virtual Box", commands: { - 'start': { - description: 'Start the cluster', + start: { + description: "Start the cluster", options: { debug: { - type: 'boolean', - shortcut: 'b', - description: 'Print debug output' + type: "boolean", + shortcut: "b", + description: "Print debug output", }, log: { - type: 'string', - description: 'Path to the directory storing logs.' - } + type: "string", + description: "Path to the directory storing logs.", + }, }, - handler: require('./start') + handler: start, }, - 'stop': { - description: 'Stop the cluster', + stop: { + description: "Stop the cluster", options: { debug: { - type: 'boolean', - shortcut: 'b', - description: 'Print debug output' + type: "boolean", + shortcut: "b", + description: "Print debug output", }, log: { - type: 'string', - description: 'Path to the directory storing logs.' - } + type: "string", + description: "Path to the directory storing logs.", + }, }, - handler: require('./stop') - } - } + handler: stop, + }, + }, }).route(); diff --git a/packages/lxd/lib/cluster/cli/start/index.js b/packages/lxd/lib/cluster/cli/start/index.js index 719e0c677..7178502e7 100644 --- a/packages/lxd/lib/cluster/cli/start/index.js +++ b/packages/lxd/lib/cluster/cli/start/index.js @@ -1,16 +1,16 @@ // Dependencies -const path = require('path'); -const dedent = require('dedent'); -const nikita = require('@nikitajs/core/lib'); -require('@nikitajs/lxd/lib/register'); -require('@nikitajs/log/src/register'); +import path from 'node:path' +import dedent from "dedent"; +import nikita from '@nikitajs/core'; +import '@nikitajs/lxd/register'; +import '@nikitajs/log/register'; const key = path.relative( process.cwd(), `${__dirname}/../../../assets/.vagrant/machines/default/virtualbox/private_key` ); -module.exports = async function({params}) { +export default async function({params}) { await nikita({ $debug: params.debug }).log.cli({ diff --git a/packages/lxd/lib/cluster/cli/stop/index.js b/packages/lxd/lib/cluster/cli/stop/index.js index ed2cd11eb..706d3339e 100644 --- a/packages/lxd/lib/cluster/cli/stop/index.js +++ b/packages/lxd/lib/cluster/cli/stop/index.js @@ -1,9 +1,9 @@ // Dependencies -const nikita = require("@nikitajs/core/lib"); -require("@nikitajs/lxd/lib/register"); -require("@nikitajs/log/src/register"); +import nikita from '@nikitajs/core'; +import "@nikitajs/lxd/register"; +import "@nikitajs/log/register"; -module.exports = function ({ params }) { +export default function ({ params }) { nikita({ $debug: params.debug, }) diff --git a/packages/lxd/lib/cluster/delete/index.js b/packages/lxd/lib/cluster/delete/index.js index 43b767700..426cc05a4 100644 --- a/packages/lxd/lib/cluster/delete/index.js +++ b/packages/lxd/lib/cluster/delete/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json') +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (!!config.pre_delete) { await this.call(config, config.pre_delete); diff --git a/packages/lxd/lib/cluster/delete/schema.json b/packages/lxd/lib/cluster/delete/schema.json index 9d9285267..53f95aba0 100644 --- a/packages/lxd/lib/cluster/delete/schema.json +++ b/packages/lxd/lib/cluster/delete/schema.json @@ -3,14 +3,14 @@ "type": "object", "properties": { "containers": { - "$ref": 
"module://@nikitajs/lxd/lib/cluster#/definitions/config/properties/containers" + "$ref": "module://@nikitajs/lxd/cluster#/definitions/config/properties/containers" }, "networks": { "type": "object", "default": {}, "patternProperties": { ".*": { - "$ref": "module://@nikitajs/lxd/lib/network#/definitions/config/properties/properties" + "$ref": "module://@nikitajs/lxd/network#/definitions/config/properties/properties" } } }, diff --git a/packages/lxd/lib/cluster/index.js b/packages/lxd/lib/cluster/index.js index b9a10ab3f..407071b08 100644 --- a/packages/lxd/lib/cluster/index.js +++ b/packages/lxd/lib/cluster/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('../utils'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import utils from "@nikitajs/lxd/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (!!config.prevision) { await this.call(config, config.prevision); diff --git a/packages/lxd/lib/cluster/samples/three_nodes.js b/packages/lxd/lib/cluster/samples/three_nodes.js index 5e051e790..500ee7437 100644 --- a/packages/lxd/lib/cluster/samples/three_nodes.js +++ b/packages/lxd/lib/cluster/samples/three_nodes.js @@ -1,9 +1,9 @@ // Dependencies -const path = require("path"); -const nikita = require("@nikitajs/core/lib"); -require("@nikitajs/log/lib/register"); -require("@nikitajs/lxd/lib/register"); -require("@nikitajs/tools/lib/register"); +import path from "node:path"; +import nikita from '@nikitajs/core'; +import "@nikitajs/log/register"; +import "@nikitajs/lxd/register"; +import "@nikitajs/tools/register"; /* Notes: diff --git a/packages/lxd/lib/cluster/schema.json b/packages/lxd/lib/cluster/schema.json index 63a9e34e5..25152c83a 100644 --- a/packages/lxd/lib/cluster/schema.json +++ b/packages/lxd/lib/cluster/schema.json @@ -7,18 +7,18 @@ "description": "Initialize a Linux Container with given image name, container name and\nconfig.", "patternProperties": { "(^[a-zA-Z][a-zA-Z0-9-]{0,61}[a-zA-Z0-9](?!-)$)|(^[a-zA-Z]$)": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config", + "$ref": "module://@nikitajs/lxd/init#/definitions/config", "type": "object", "properties": { "properties": { - "$ref": "module://@nikitajs/lxd/lib/config/set#/definitions/config/properties/properties" + "$ref": "module://@nikitajs/lxd/config/set#/definitions/config/properties/properties" }, "disk": { "type": "object", "default": {}, "patternProperties": { ".*": { - "$ref": "module://@nikitajs/lxd/lib/config/device#/definitions/disk/properties/properties" + "$ref": "module://@nikitajs/lxd/config/device#/definitions/disk/properties/properties" } } }, @@ -43,7 +43,7 @@ } }, { - "$ref": "module://@nikitajs/lxd/lib/config/device#/definitions/nic/properties/properties" + "$ref": "module://@nikitajs/lxd/config/device#/definitions/nic/properties/properties" } ] } @@ -54,7 +54,7 @@ "default": {}, "patternProperties": { ".*": { - "$ref": "module://@nikitajs/lxd/lib/config/device#/definitions/proxy/properties/properties" + "$ref": "module://@nikitajs/lxd/config/device#/definitions/proxy/properties/properties" } } }, @@ -98,7 +98,7 @@ "default": {}, "patternProperties": { ".*": { - "$ref": "module://@nikitajs/lxd/lib/network#/definitions/config/properties/properties" + "$ref": "module://@nikitajs/lxd/network#/definitions/config/properties/properties" } } }, diff --git a/packages/lxd/lib/cluster/stop/index.js 
b/packages/lxd/lib/cluster/stop/index.js index 25704d8ff..7d91cd902 100644 --- a/packages/lxd/lib/cluster/stop/index.js +++ b/packages/lxd/lib/cluster/stop/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (!!config.pre_stop) { await this.call(config, config.pre_stop); diff --git a/packages/lxd/lib/cluster/stop/schema.json b/packages/lxd/lib/cluster/stop/schema.json index 06586fcdf..7444960cb 100644 --- a/packages/lxd/lib/cluster/stop/schema.json +++ b/packages/lxd/lib/cluster/stop/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "containers": { - "$ref": "module://@nikitajs/lxd/lib/cluster#/definitions/config/properties/containers" + "$ref": "module://@nikitajs/lxd/cluster#/definitions/config/properties/containers" }, "wait": { "type": "boolean", diff --git a/packages/lxd/lib/config/device/delete/index.js b/packages/lxd/lib/config/device/delete/index.js index 21d187cd1..bd4d93e51 100644 --- a/packages/lxd/lib/config/device/delete/index.js +++ b/packages/lxd/lib/config/device/delete/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { const {properties} = (await this.lxc.config.device.show({ container: config.container, diff --git a/packages/lxd/lib/config/device/delete/schema.json b/packages/lxd/lib/config/device/delete/schema.json index 979d85c00..470d3d8c7 100644 --- a/packages/lxd/lib/config/device/delete/schema.json +++ b/packages/lxd/lib/config/device/delete/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "device": { "type": "string", diff --git a/packages/lxd/lib/config/device/exists/index.js b/packages/lxd/lib/config/device/exists/index.js index b14c8f210..23264892c 100644 --- a/packages/lxd/lib/config/device/exists/index.js +++ b/packages/lxd/lib/config/device/exists/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { const {properties} = (await this.lxc.config.device.show({ container: config.container, diff --git a/packages/lxd/lib/config/device/exists/schema.json b/packages/lxd/lib/config/device/exists/schema.json index 979d85c00..470d3d8c7 100644 --- a/packages/lxd/lib/config/device/exists/schema.json +++ b/packages/lxd/lib/config/device/exists/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "device": { "type": "string", diff --git a/packages/lxd/lib/config/device/index.js b/packages/lxd/lib/config/device/index.js index d845b5954..101efd430 100644 --- a/packages/lxd/lib/config/device/index.js +++ b/packages/lxd/lib/config/device/index.js @@ -1,10 +1,11 @@ // Dependencies -const diff = require('object-diff'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import diff from 
'object-diff'; +import utils from "@nikitajs/lxd/utils"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // No properties, dont go further if (Object.keys(config.properties).length === 0) return false; @@ -34,9 +35,9 @@ module.exports = { config.type, ...Object.keys(config.properties).map( (key) => - utils.string.escapeshellarg(key) + + esa(key) + "=" + - utils.string.escapeshellarg(config.properties[key]) + esa(config.properties[key]) ), ].join(" "), }); @@ -56,7 +57,7 @@ module.exports = { config.container, config.device, key, - utils.string.escapeshellarg(value), + esa(value), ].join(" "), }); } diff --git a/packages/lxd/lib/config/device/schema.json b/packages/lxd/lib/config/device/schema.json index fddfb1cd8..17794d99d 100644 --- a/packages/lxd/lib/config/device/schema.json +++ b/packages/lxd/lib/config/device/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "device": { "type": "string", diff --git a/packages/lxd/lib/config/device/show/index.js b/packages/lxd/lib/config/device/show/index.js index 718da6917..be2cdb31a 100644 --- a/packages/lxd/lib/config/device/show/index.js +++ b/packages/lxd/lib/config/device/show/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { const {data} = (await this.lxc.query({ path: '/' + ['1.0', 'instances', config.container].join('/') diff --git a/packages/lxd/lib/config/device/show/schema.json b/packages/lxd/lib/config/device/show/schema.json index 979d85c00..470d3d8c7 100644 --- a/packages/lxd/lib/config/device/show/schema.json +++ b/packages/lxd/lib/config/device/show/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "device": { "type": "string", diff --git a/packages/lxd/lib/config/set/index.js b/packages/lxd/lib/config/set/index.js index 520257fb6..a07b2a381 100644 --- a/packages/lxd/lib/config/set/index.js +++ b/packages/lxd/lib/config/set/index.js @@ -1,25 +1,25 @@ // Dependencies -const {merge} = require('mixme'); -const yaml = require('js-yaml'); -const diff = require('object-diff'); -const definitions = require('./schema.json'); +import { merge } from "mixme"; +import yaml from "js-yaml"; +import diff from "object-diff"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({config}) { +export default { + handler: async function ({ config }) { // Normalize config for (const key in config.properties) { const value = config.properties[key]; - if (typeof value === 'string') { + if (typeof value === "string") { continue; } config.properties[key] = value.toString(); } - const {stdout} = (await this.execute({ + const { stdout } = await this.execute({ $shy: true, - command: `${['lxc', 'config', 'show', config.container].join(' ')}`, - code: [0, 42] - })); + command: `${["lxc", "config", "show", config.container].join(" ")}`, + code: [0, 42], + 
}); const { config: properties } = yaml.load(stdout); const changes = diff(properties, merge(properties, config.properties)); if (Object.keys(changes).length === 0) return false; @@ -28,12 +28,19 @@ module.exports = { // if changes is empty status is false because no command were executed // Note, it doesnt seem possible to set multiple keys in one command await this.execute({ - command: ['lxc', 'config', 'set', config.container, key, `'${value.replace('\'', '\\\'')}'`].join(' ') + command: [ + "lxc", + "config", + "set", + config.container, + key, + `'${value.replace("'", "\\'")}'`, + ].join(" "), }); } return true; }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/lxd/lib/config/set/schema.json b/packages/lxd/lib/config/set/schema.json index 58ade7206..d1fa94fa5 100644 --- a/packages/lxd/lib/config/set/schema.json +++ b/packages/lxd/lib/config/set/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "properties": { "type": "object", diff --git a/packages/lxd/lib/delete/index.js b/packages/lxd/lib/delete/index.js index 3cc04e207..8e917097b 100644 --- a/packages/lxd/lib/delete/index.js +++ b/packages/lxd/lib/delete/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return (await this.execute({ command: dedent` diff --git a/packages/lxd/lib/delete/schema.json b/packages/lxd/lib/delete/schema.json index 5e316225a..6d416b972 100644 --- a/packages/lxd/lib/delete/schema.json +++ b/packages/lxd/lib/delete/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "force": { "type": "boolean", diff --git a/packages/lxd/lib/exec/index.js b/packages/lxd/lib/exec/index.js index ce915df29..6ab8d1f86 100644 --- a/packages/lxd/lib/exec/index.js +++ b/packages/lxd/lib/exec/index.js @@ -1,14 +1,13 @@ // Dependencies -const definitions = require('./schema.json'); -const utils = require('../utils'); -const esa = utils.string.escapeshellarg; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const opt = [ config.user ? `--user ${config.user}` : void 0, - config.cwd ? `--cwd ${utils.string.escapeshellarg(config.cwd)}` : void 0, + config.cwd ? 
`--cwd ${esa(config.cwd)}` : void 0, ...Object.keys(config.env).map( (k) => `--env ${esa(k)}=${esa(config.env[k])}` ), diff --git a/packages/lxd/lib/exec/schema.json b/packages/lxd/lib/exec/schema.json index 9b396bf3d..b92d20cf5 100644 --- a/packages/lxd/lib/exec/schema.json +++ b/packages/lxd/lib/exec/schema.json @@ -1,10 +1,10 @@ { "config": { "type": "object", - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config", + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "command": { "type": "string", @@ -25,10 +25,10 @@ "description": "The shell in which to execute commands, for example `sh`, `bash` or\n`zsh`." }, "trim": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/trim" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/trim" }, "trap": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/trap" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/trap" }, "user": { "type": "integer", diff --git a/packages/lxd/lib/exists/index.js b/packages/lxd/lib/exists/index.js index f8daf5741..f3b387566 100644 --- a/packages/lxd/lib/exists/index.js +++ b/packages/lxd/lib/exists/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { // Check if container exist const { $status } = await this.lxc.query({ diff --git a/packages/lxd/lib/exists/schema.json b/packages/lxd/lib/exists/schema.json index 9fdc4a184..da0817521 100644 --- a/packages/lxd/lib/exists/schema.json +++ b/packages/lxd/lib/exists/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/file/exists/index.js b/packages/lxd/lib/file/exists/index.js index bc8a524e2..7e5f724dd 100644 --- a/packages/lxd/lib/file/exists/index.js +++ b/packages/lxd/lib/file/exists/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.lxc.exec({ $header: `Check if file exists in container ${config.container}`, diff --git a/packages/lxd/lib/file/exists/schema.json b/packages/lxd/lib/file/exists/schema.json index 18b281ccd..34170a765 100644 --- a/packages/lxd/lib/file/exists/schema.json +++ b/packages/lxd/lib/file/exists/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "target": { "type": "string", diff --git a/packages/lxd/lib/file/pull/index.js b/packages/lxd/lib/file/pull/index.js index 73140c5fe..c7bb8d41e 100644 --- a/packages/lxd/lib/file/pull/index.js +++ b/packages/lxd/lib/file/pull/index.js @@ -1,11 +1,11 @@ // Dependencies -const path = require('path'); -const dedent = 
require('dedent'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import dedent from "dedent"; +import utils from "@nikitajs/lxd/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { if (!config.source) { throw Error("Invalid Option: source is required"); diff --git a/packages/lxd/lib/file/pull/schema.json b/packages/lxd/lib/file/pull/schema.json index 753c5c9ac..96c7af166 100644 --- a/packages/lxd/lib/file/pull/schema.json +++ b/packages/lxd/lib/file/pull/schema.json @@ -3,11 +3,11 @@ "type": "object", "properties": { "algo": { - "$ref": "module://@nikitajs/core/lib/actions/fs/hash#/definitions/config/properties/algo", + "$ref": "module://@nikitajs/core/actions/fs/hash#/definitions/config/properties/algo", "default": "md5" }, "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container", + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container", "description": "Name of the container in lxd." }, "source": { diff --git a/packages/lxd/lib/file/push/index.js b/packages/lxd/lib/file/push/index.js index 94811b5e9..ac2ab3291 100644 --- a/packages/lxd/lib/file/push/index.js +++ b/packages/lxd/lib/file/push/index.js @@ -1,11 +1,11 @@ // Dependencies -const path = require('path'); -const dedent = require('dedent'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import dedent from "dedent"; +import utils from "@nikitajs/lxd/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, metadata: { tmpdir } }) { // Make source file with content if (config.content != null) { diff --git a/packages/lxd/lib/file/push/schema.json b/packages/lxd/lib/file/push/schema.json index d79511935..10126f2dd 100644 --- a/packages/lxd/lib/file/push/schema.json +++ b/packages/lxd/lib/file/push/schema.json @@ -4,10 +4,10 @@ "properties": { "algo": { "default": "md5", - "$ref": "module://@nikitajs/core/lib/actions/fs/hash#/definitions/config/properties/algo" + "$ref": "module://@nikitajs/core/actions/fs/hash#/definitions/config/properties/algo" }, "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container", + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container", "description": "Name of the container in lxd." 
}, "content": { diff --git a/packages/lxd/lib/file/read/index.js b/packages/lxd/lib/file/read/index.js index 49dccab43..03216c5ba 100644 --- a/packages/lxd/lib/file/read/index.js +++ b/packages/lxd/lib/file/read/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { let {data} = (await this.lxc.query({ $header: `Check if file exists in container ${config.container}`, diff --git a/packages/lxd/lib/file/read/schema.json b/packages/lxd/lib/file/read/schema.json index 3f780ec3f..bb77e742e 100644 --- a/packages/lxd/lib/file/read/schema.json +++ b/packages/lxd/lib/file/read/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "target": { "type": "string", diff --git a/packages/lxd/lib/goodies/prlimit/index.js b/packages/lxd/lib/goodies/prlimit/index.js index e18ceb12d..3f459f3c1 100644 --- a/packages/lxd/lib/goodies/prlimit/index.js +++ b/packages/lxd/lib/goodies/prlimit/index.js @@ -1,7 +1,7 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import utils from "@nikitajs/lxd/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Errors const errors = { @@ -11,7 +11,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function({config}) { try { // TODO: pass sudo as a config instead of inside the command diff --git a/packages/lxd/lib/goodies/prlimit/schema.json b/packages/lxd/lib/goodies/prlimit/schema.json index 9fdc4a184..da0817521 100644 --- a/packages/lxd/lib/goodies/prlimit/schema.json +++ b/packages/lxd/lib/goodies/prlimit/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/info/index.js b/packages/lxd/lib/info/index.js index b025dea14..a34bca386 100644 --- a/packages/lxd/lib/info/index.js +++ b/packages/lxd/lib/info/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { // Check if container exist return await this.lxc.query({ diff --git a/packages/lxd/lib/info/schema.json b/packages/lxd/lib/info/schema.json index 9fdc4a184..da0817521 100644 --- a/packages/lxd/lib/info/schema.json +++ b/packages/lxd/lib/info/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/init/index.js b/packages/lxd/lib/init/index.js index f8307187f..8c66cb77c 100644 --- a/packages/lxd/lib/init/index.js +++ b/packages/lxd/lib/init/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import definitions from "./schema.json" 
assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const command_init = [ "lxc", diff --git a/packages/lxd/lib/list/index.js b/packages/lxd/lib/list/index.js index 6a963af45..25b104121 100644 --- a/packages/lxd/lib/list/index.js +++ b/packages/lxd/lib/list/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { const {data} = (await this.lxc.query({ $shy: false, diff --git a/packages/lxd/lib/network/attach/index.js b/packages/lxd/lib/network/attach/index.js index f8c4abe60..ad5171f2f 100644 --- a/packages/lxd/lib/network/attach/index.js +++ b/packages/lxd/lib/network/attach/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); -const esa = require('../../utils').string.escapeshellarg; +import dedent from "dedent"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { //Build command const command_attach = [ diff --git a/packages/lxd/lib/network/attach/schema.json b/packages/lxd/lib/network/attach/schema.json index 8152eb377..dddb3429a 100644 --- a/packages/lxd/lib/network/attach/schema.json +++ b/packages/lxd/lib/network/attach/schema.json @@ -7,7 +7,7 @@ "description": "The network name to attach." }, "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/network/delete/index.js b/packages/lxd/lib/network/delete/index.js index 5e687625d..faf973aed 100644 --- a/packages/lxd/lib/network/delete/index.js +++ b/packages/lxd/lib/network/delete/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return await this.lxc.query({ path: `/1.0/networks/${config.network}`, diff --git a/packages/lxd/lib/network/detach/index.js b/packages/lxd/lib/network/detach/index.js index 1885c2a5f..af3a4e316 100644 --- a/packages/lxd/lib/network/detach/index.js +++ b/packages/lxd/lib/network/detach/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.execute({ command: dedent` diff --git a/packages/lxd/lib/network/detach/schema.json b/packages/lxd/lib/network/detach/schema.json index 96d61abcd..26748decf 100644 --- a/packages/lxd/lib/network/detach/schema.json +++ b/packages/lxd/lib/network/detach/schema.json @@ -7,7 +7,7 @@ "description": "The network name to detach." 
}, "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/network/index.js b/packages/lxd/lib/network/index.js index 6c6fada5f..36de20cc2 100644 --- a/packages/lxd/lib/network/index.js +++ b/packages/lxd/lib/network/index.js @@ -1,13 +1,13 @@ // Dependencies -const dedent = require("dedent"); -const yaml = require('js-yaml'); -const diff = require('object-diff'); -const {merge} = require('mixme'); -const definitions = require("./schema.json"); -const isa = require('../utils').string.escapeshellarg; +import dedent from "dedent"; +import yaml from 'js-yaml'; +import diff from 'object-diff'; +import {merge} from 'mixme'; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { // Normalize config for (const key in config.properties) { @@ -25,7 +25,7 @@ module.exports = { "create", config.network, ...Object.keys(config.properties).map( - (key) => isa(key) + "=" + isa(config.properties[key]) + (key) => esa(key) + "=" + esa(config.properties[key]) ), ].join(" ")} `, @@ -57,9 +57,9 @@ module.exports = { "lxc", "network", "set", - isa(config.network), - isa(key), - isa(value), + esa(config.network), + esa(key), + esa(value), ].join(" "), }); } diff --git a/packages/lxd/lib/network/list/index.js b/packages/lxd/lib/network/list/index.js index b5f7605e2..345067f4c 100644 --- a/packages/lxd/lib/network/list/index.js +++ b/packages/lxd/lib/network/list/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function () { const { data } = await this.lxc.query({ path: "/1.0/networks", diff --git a/packages/lxd/lib/query/index.js b/packages/lxd/lib/query/index.js index 36c5f4486..891a3aead 100644 --- a/packages/lxd/lib/query/index.js +++ b/packages/lxd/lib/query/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); -const esa = require('../utils').string.escapeshellarg; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const { $status, stdout } = await this.execute({ command: [ diff --git a/packages/lxd/lib/query/schema.json b/packages/lxd/lib/query/schema.json index e6360db62..ccd957136 100644 --- a/packages/lxd/lib/query/schema.json +++ b/packages/lxd/lib/query/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "code": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/code" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/code" }, "data": { "type": "string", diff --git a/packages/lxd/lib/register.js b/packages/lxd/lib/register.js index aa0bb3a2c..ca834efee 100644 --- a/packages/lxd/lib/register.js +++ b/packages/lxd/lib/register.js @@ -1,79 +1,74 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; // Action registration -require('@nikitajs/file/lib/register'); -require('@nikitajs/network/lib/register'); -module.exports = { +import '@nikitajs/file/register'; 
+import '@nikitajs/network/register'; + +// Actions +const actions = { lxc: { cluster: { - '': '@nikitajs/lxd/lib/cluster', - stop: '@nikitajs/lxd/lib/cluster/stop', - delete: '@nikitajs/lxd/lib/cluster/delete' + '': '@nikitajs/lxd/cluster', + stop: '@nikitajs/lxd/cluster/stop', + delete: '@nikitajs/lxd/cluster/delete' }, config: { device: { - '': '@nikitajs/lxd/lib/config/device', - delete: '@nikitajs/lxd/lib/config/device/delete', - exists: '@nikitajs/lxd/lib/config/device/exists', - show: '@nikitajs/lxd/lib/config/device/show' + '': '@nikitajs/lxd/config/device', + delete: '@nikitajs/lxd/config/device/delete', + exists: '@nikitajs/lxd/config/device/exists', + show: '@nikitajs/lxd/config/device/show' }, - set: '@nikitajs/lxd/lib/config/set' + set: '@nikitajs/lxd/config/set' }, - exists: '@nikitajs/lxd/lib/exists', - init: '@nikitajs/lxd/lib/init', - info: '@nikitajs/lxd/lib/info', - delete: '@nikitajs/lxd/lib/delete', - start: '@nikitajs/lxd/lib/start', - state: '@nikitajs/lxd/lib/state', - stop: '@nikitajs/lxd/lib/stop', - exec: '@nikitajs/lxd/lib/exec', + exists: '@nikitajs/lxd/exists', + init: '@nikitajs/lxd/init', + info: '@nikitajs/lxd/info', + delete: '@nikitajs/lxd/delete', + start: '@nikitajs/lxd/start', + state: '@nikitajs/lxd/state', + stop: '@nikitajs/lxd/stop', + exec: '@nikitajs/lxd/exec', file: { - exists: '@nikitajs/lxd/lib/file/exists', - pull: '@nikitajs/lxd/lib/file/pull', - push: '@nikitajs/lxd/lib/file/push', - read: '@nikitajs/lxd/lib/file/read' + exists: '@nikitajs/lxd/file/exists', + pull: '@nikitajs/lxd/file/pull', + push: '@nikitajs/lxd/file/push', + read: '@nikitajs/lxd/file/read' }, goodies: { - prlimit: '@nikitajs/lxd/lib/goodies/prlimit' + prlimit: '@nikitajs/lxd/goodies/prlimit' }, network: { - '': '@nikitajs/lxd/lib/network', - create: '@nikitajs/lxd/lib/network', - attach: '@nikitajs/lxd/lib/network/attach', - detach: '@nikitajs/lxd/lib/network/detach', - delete: '@nikitajs/lxd/lib/network/delete', - list: '@nikitajs/lxd/lib/network/list' + '': '@nikitajs/lxd/network', + create: '@nikitajs/lxd/network', + attach: '@nikitajs/lxd/network/attach', + detach: '@nikitajs/lxd/network/detach', + delete: '@nikitajs/lxd/network/delete', + list: '@nikitajs/lxd/network/list' }, - query: '@nikitajs/lxd/lib/query', - list: '@nikitajs/lxd/lib/list', - running: '@nikitajs/lxd/lib/running', + query: '@nikitajs/lxd/query', + list: '@nikitajs/lxd/list', + running: '@nikitajs/lxd/running', storage: { - '': '@nikitajs/lxd/lib/storage', - delete: '@nikitajs/lxd/lib/storage/delete', - exists: '@nikitajs/lxd/lib/storage/exists', - list: '@nikitajs/lxd/lib/storage/list', + '': '@nikitajs/lxd/storage', + delete: '@nikitajs/lxd/storage/delete', + exists: '@nikitajs/lxd/storage/exists', + list: '@nikitajs/lxd/storage/list', volume: { - '': '@nikitajs/lxd/lib/storage/volume', - delete: '@nikitajs/lxd/lib/storage/volume/delete', - list: '@nikitajs/lxd/lib/storage/volume/list', - get: '@nikitajs/lxd/lib/storage/volume/get', - attach: '@nikitajs/lxd/lib/storage/volume/attach' + '': '@nikitajs/lxd/storage/volume', + delete: '@nikitajs/lxd/storage/volume/delete', + list: '@nikitajs/lxd/storage/volume/list', + get: '@nikitajs/lxd/storage/volume/get', + attach: '@nikitajs/lxd/storage/volume/attach' } }, wait: { - ready: '@nikitajs/lxd/lib/wait/ready' + ready: '@nikitajs/lxd/wait/ready' }, - resources: '@nikitajs/lxd/lib/resources' + resources: '@nikitajs/lxd/resources' } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - 
console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/lxd/lib/resources/index.js b/packages/lxd/lib/resources/index.js index e7c1be146..0fd17ae97 100644 --- a/packages/lxd/lib/resources/index.js +++ b/packages/lxd/lib/resources/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function() { const {data, $status} = await this.lxc.query({ path: "/1.0/resources" diff --git a/packages/lxd/lib/running/index.js b/packages/lxd/lib/running/index.js index 9ed4586b8..a021ee575 100644 --- a/packages/lxd/lib/running/index.js +++ b/packages/lxd/lib/running/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { return await this.execute({ command: `lxc list -c ns --format csv | grep '${config.container},RUNNING' || exit 42`, diff --git a/packages/lxd/lib/running/schema.json b/packages/lxd/lib/running/schema.json index 9fdc4a184..da0817521 100644 --- a/packages/lxd/lib/running/schema.json +++ b/packages/lxd/lib/running/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/start/index.js b/packages/lxd/lib/start/index.js index 58d616ae7..7b6f6329c 100644 --- a/packages/lxd/lib/start/index.js +++ b/packages/lxd/lib/start/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Check if container is not already running const {$status: running} = await this.lxc.running(config.container) diff --git a/packages/lxd/lib/start/schema.json b/packages/lxd/lib/start/schema.json index 9fdc4a184..da0817521 100644 --- a/packages/lxd/lib/start/schema.json +++ b/packages/lxd/lib/start/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git a/packages/lxd/lib/state/index.js b/packages/lxd/lib/state/index.js index 69ea52098..e2c42b793 100644 --- a/packages/lxd/lib/state/index.js +++ b/packages/lxd/lib/state/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {data, $status} = (await this.lxc.query({ path: `/1.0/instances/${config.container}/state` diff --git a/packages/lxd/lib/state/schema.json b/packages/lxd/lib/state/schema.json index 9fdc4a184..da0817521 100644 --- a/packages/lxd/lib/state/schema.json +++ b/packages/lxd/lib/state/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" } }, "required": [ diff --git 
a/packages/lxd/lib/stop/index.js b/packages/lxd/lib/stop/index.js index af9523c05..229c94415 100644 --- a/packages/lxd/lib/stop/index.js +++ b/packages/lxd/lib/stop/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Check if container is running const {$status: running} = await this.lxc.running(config.container) diff --git a/packages/lxd/lib/stop/schema.json b/packages/lxd/lib/stop/schema.json index 49ef530aa..baab6b363 100644 --- a/packages/lxd/lib/stop/schema.json +++ b/packages/lxd/lib/stop/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" }, "wait": { "type": "boolean", diff --git a/packages/lxd/lib/storage/delete/index.js b/packages/lxd/lib/storage/delete/index.js index be4edcb0d..a446216b3 100644 --- a/packages/lxd/lib/storage/delete/index.js +++ b/packages/lxd/lib/storage/delete/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); -const esa = require('../../utils').string.escapeshellarg; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Check if storage exists const {exists} = await this.lxc.storage.exists(config.name); diff --git a/packages/lxd/lib/storage/exists/index.js b/packages/lxd/lib/storage/exists/index.js index 947e69e46..0eaf841b0 100644 --- a/packages/lxd/lib/storage/exists/index.js +++ b/packages/lxd/lib/storage/exists/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {storages} = await this.lxc.storage.list() return { diff --git a/packages/lxd/lib/storage/index.js b/packages/lxd/lib/storage/index.js index e8ef92110..31f2ed45e 100644 --- a/packages/lxd/lib/storage/index.js +++ b/packages/lxd/lib/storage/index.js @@ -1,75 +1,62 @@ // Dependencies -const definitions = require('./schema.json'); -var diff, handler, yaml; +import dedent from 'dedent'; +import yaml from 'js-yaml'; +import diff from 'object-diff'; +import definitions from "./schema.json" assert { type: "json" }; -// ## Handler -handler = async function({config}) { - var $status, changes, code, currentProperties, k, key, ref, stdout, v, value; - ref = config.properties; - // Normalize config - for (k in ref) { - v = ref[k]; - if (typeof v === 'string') { - continue; +// Action +export default { + handler: async function({config}) { + // Normalize config + for (const k in config.properties) { + const v = config.properties[k]; + if (typeof v === 'string') { + continue; + } + config.properties[k] = v.toString(); } - config.properties[k] = v.toString(); - } - // Check if exists - ({stdout, code} = (await this.execute({ - command: `lxc storage show ${config.name} && exit 42 -${[ - 'lxc', - 'storage', - 'create', - config.name, - config.driver, - ...((function() { - var ref1, - results; - ref1 = config.properties; - results = []; - for (key in ref1) { - value = ref1[key]; - results.push(`${key}='${value.replace('\'', - '\\\'')}'`); - } - return 
results; - })()) - ].join(' ')}`, - code: [0, 42] - }))); - if (code !== 42) { - return; - } - // Storage already exists, find the changes - if (!(config != null ? config.properties : void 0)) { - return; - } - ({ - config: currentProperties - } = yaml.load(stdout)); - changes = diff(currentProperties, config.properties); - for (key in changes) { - value = changes[key]; - // if changes is empty status is false because no command were executed - ({$status} = (await this.execute({ - command: ['lxc', 'storage', 'set', config.name, key, `'${value.replace('\'', '\\\'')}'`].join(' ') - }))); - } - return { - $status: $status - }; -}; - -// ## Exports -module.exports = { - handler: handler, + // Check if exists + const { stdout, code } = await this.execute({ + command: dedent` + lxc storage show ${config.name} && exit 42 + ${[ + "lxc", + "storage", + "create", + config.name, + config.driver, + ...(function () { + const results = []; + for (const key in config.properties) { + const value = config.properties[key]; + results.push(`${key}='${value.replace("'", "\\'")}'`); + } + return results; + })(), + ].join(" ")} + `, + code: [0, 42], + }); + if (code !== 42) { + return; + } + // Storage already exists, find the changes + if (config.properties == null) { + return; + } + const { config: currentProperties } = yaml.load(stdout); + const changes = diff(currentProperties, config.properties); + for (const key in changes) { + const value = changes[key]; + await this.execute({ + command: ['lxc', 'storage', 'set', config.name, key, `'${value.replace('\'', '\\\'')}'`].join(' ') + }); + } + return { + $status: Object.keys(changes).length > 0 + }; + }, metadata: { definitions: definitions } }; - -// ## Dependencies -yaml = require('js-yaml'); - -diff = require('object-diff'); diff --git a/packages/lxd/lib/storage/list/index.js b/packages/lxd/lib/storage/list/index.js index 8506d9798..a1494ee6a 100644 --- a/packages/lxd/lib/storage/list/index.js +++ b/packages/lxd/lib/storage/list/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function() { const {data: storages} = await this.lxc.query({ path: `/1.0/storage-pools?recursion=1`, diff --git a/packages/lxd/lib/storage/volume/attach/index.js b/packages/lxd/lib/storage/volume/attach/index.js index 4fed4dadd..f2fa42122 100644 --- a/packages/lxd/lib/storage/volume/attach/index.js +++ b/packages/lxd/lib/storage/volume/attach/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // note, getting the volume to make sure it exists const { $status: volumeExists, data: volume } = diff --git a/packages/lxd/lib/storage/volume/attach/schema.json b/packages/lxd/lib/storage/volume/attach/schema.json index a437307fe..cc513360c 100644 --- a/packages/lxd/lib/storage/volume/attach/schema.json +++ b/packages/lxd/lib/storage/volume/attach/schema.json @@ -22,7 +22,7 @@ "description": "Type of the storage volume to attach." }, "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container", + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container", "description": "Name of the container to attach the volume to." 
}, "path": { diff --git a/packages/lxd/lib/storage/volume/delete/index.js b/packages/lxd/lib/storage/volume/delete/index.js index abbac7ac6..61a959578 100644 --- a/packages/lxd/lib/storage/volume/delete/index.js +++ b/packages/lxd/lib/storage/volume/delete/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Actions -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.lxc.query({ path: `/1.0/storage-pools/${config.pool}/volumes/${config.type}/${config.name}`, diff --git a/packages/lxd/lib/storage/volume/get/index.js b/packages/lxd/lib/storage/volume/get/index.js index 6483a24dc..0dda93f8e 100644 --- a/packages/lxd/lib/storage/volume/get/index.js +++ b/packages/lxd/lib/storage/volume/get/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status, data} = await this.lxc.query({ path: `/1.0/storage-pools/${config.pool}/volumes/${config.type}/${config.name}`, diff --git a/packages/lxd/lib/storage/volume/index.js b/packages/lxd/lib/storage/volume/index.js index fd70530dc..8fb53c2aa 100644 --- a/packages/lxd/lib/storage/volume/index.js +++ b/packages/lxd/lib/storage/volume/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {$status} = await this.lxc.query({ path: `/1.0/storage-pools/${config.pool}/volumes/${config.type}`, diff --git a/packages/lxd/lib/storage/volume/list/index.js b/packages/lxd/lib/storage/volume/list/index.js index 852f4087c..a639f8a5f 100644 --- a/packages/lxd/lib/storage/volume/list/index.js +++ b/packages/lxd/lib/storage/volume/list/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { let {$status, data} = await this.lxc.query({ path: `/1.0/storage-pools/${config.pool}/volumes/${config.type}`, diff --git a/packages/lxd/lib/utils/index.js b/packages/lxd/lib/utils/index.js index 676c71129..9cd4ba020 100644 --- a/packages/lxd/lib/utils/index.js +++ b/packages/lxd/lib/utils/index.js @@ -1,8 +1,8 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); -const stderr_to_error_message = require('./stderr_to_error_message'); +import utils from "@nikitajs/core/utils"; +import stderr_to_error_message from '@nikitajs/lxd/utils/stderr_to_error_message'; -module.exports = { +export default { ...utils, stderr_to_error_message: stderr_to_error_message }; diff --git a/packages/lxd/lib/utils/stderr_to_error_message.js b/packages/lxd/lib/utils/stderr_to_error_message.js index fd81eaa18..835be6c15 100644 --- a/packages/lxd/lib/utils/stderr_to_error_message.js +++ b/packages/lxd/lib/utils/stderr_to_error_message.js @@ -1,7 +1,7 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); +import utils from "@nikitajs/core/utils"; -module.exports = function(err, stderr) { +export default function(err, stderr) { stderr = stderr.trim(); if (utils.string.lines(stderr).length === 1) { return err.message = stderr; diff --git a/packages/lxd/lib/wait/ready/index.js 
b/packages/lxd/lib/wait/ready/index.js index 9f83b0515..b0ec68771 100644 --- a/packages/lxd/lib/wait/ready/index.js +++ b/packages/lxd/lib/wait/ready/index.js @@ -1,63 +1,62 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); -var handler; +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; -// ## Handler -handler = async function({config}) { - const {$status} = await this.call({ - $retry: 100, - $sleep: 1000 - }, async function() { - const { - config: {processes} - } = (await this.lxc.state({ - $header: "Checking if instance is ready", - container: config.container - })); - // Processes are at -1 when they aren't ready - if (processes < 0) { - throw Error("Reschedule: Instance not booted"); - } - // Sometimes processes alone aren't enough, so we test if we can get the container - const {$status} = (await this.lxc.exec({ - $header: "Trying to execute a command", - container: config.container, - command: dedent` - if ( command -v systemctl || command -v rc-service ); then - exit 0 - else - exit 42 - fi - `, - code: [0, 42] - })); - if ($status === false) { - throw Error("Reschedule: Instance not ready to execute commands"); - } - // Checking if internet is working and ready for us to use - if (config.nat === true) { - const {$status} = await this.lxc.exec({ - $header: "Trying to connect to internet", - container: config.container, - command: config.nat_check, - code: [0, 42] - }); - if ($status === false) { - throw Error("Reschedule: Internet not ready"); +// Action +export default { + handler: async function ({ config }) { + const { $status } = await this.call( + { + $retry: 100, + $sleep: 1000, + }, + async function () { + const { + config: { processes }, + } = await this.lxc.state({ + $header: "Checking if instance is ready", + container: config.container, + }); + // Processes are at -1 when they aren't ready + if (processes < 0) { + throw Error("Reschedule: Instance not booted"); + } + // Sometimes processes alone aren't enough, so we test if we can get the container + const { $status } = await this.lxc.exec({ + $header: "Trying to execute a command", + container: config.container, + command: dedent` + if ( command -v systemctl || command -v rc-service ); then + exit 0 + else + exit 42 + fi + `, + code: [0, 42], + }); + if ($status === false) { + throw Error("Reschedule: Instance not ready to execute commands"); + } + // Checking if internet is working and ready for us to use + if (config.nat === true) { + const { $status } = await this.lxc.exec({ + $header: "Trying to connect to internet", + container: config.container, + command: config.nat_check, + code: [0, 42], + }); + if ($status === false) { + throw Error("Reschedule: Internet not ready"); + } + } } - } - }); - return { - $status: $status - }; -}; - -// ## Exports -module.exports = { - handler: handler, + ); + return { + $status: $status, + }; + }, metadata: { - argument_to_config: 'container', - definitions: definitions - } + argument_to_config: "container", + definitions: definitions, + }, }; diff --git a/packages/lxd/lib/wait/ready/schema.json b/packages/lxd/lib/wait/ready/schema.json index 90ae2be34..741076009 100644 --- a/packages/lxd/lib/wait/ready/schema.json +++ b/packages/lxd/lib/wait/ready/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "container": { - "$ref": "module://@nikitajs/lxd/lib/init#/definitions/config/properties/container" + "$ref": "module://@nikitajs/lxd/init#/definitions/config/properties/container" 
}, "nat": { "type": "boolean", diff --git a/packages/lxd/package.json b/packages/lxd/package.json index de5e7b1d5..3af819bbc 100644 --- a/packages/lxd/package.json +++ b/packages/lxd/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/lxd", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various LXD operations.", "keywords": [ "nikita", @@ -11,7 +12,6 @@ "lxd", "container" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -65,20 +65,26 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/lxd/lib/register" - ], "inline-diffs": true, - "timeout": 40000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/lxd/register", + "should" + ], + "throw-deprecation": true, + "timeout": 40000 }, "publishConfig": { "access": "public" @@ -93,5 +99,6 @@ "scripts": { "test": "npm run test:local", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/lxd/test.sample.coffee b/packages/lxd/test.sample.coffee index 137e52775..b104822aa 100644 --- a/packages/lxd/test.sample.coffee +++ b/packages/lxd/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: lxd: true lxd_vm: process.platform is 'linux' && !process.env.CI @@ -16,5 +16,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/lxd/test/cluster/delete.coffee b/packages/lxd/test/cluster/delete.coffee index 54beabdc3..2004576dc 100644 --- a/packages/lxd/test/cluster/delete.coffee +++ b/packages/lxd/test/cluster/delete.coffee @@ -1,12 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) -path = require('path') - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.cluster.delete', -> + return unless test.tags.lxd they 'delete a cluster', ({ssh}) -> @timeout -1 # yum install take a lot of time @@ -21,10 +20,10 @@ describe 'lxc.cluster.delete', -> 'ipv6.address': 'none' containers: 'nikita-cluster-del-1': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' 'nikita-cluster-del-2': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' await registry.register 'clean', -> # Status modified if cluster deleted @@ -62,10 +61,10 @@ describe 'lxc.cluster.delete', -> 'ipv6.address': 'none' containers: 'nikita-cluster-del-1': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' 'nikita-cluster-del-2': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' await @lxc.cluster cluster await @wait time: 200 @@ 
-86,10 +85,10 @@ describe 'lxc.cluster.delete', -> 'ipv6.address': 'none' containers: 'nikita-cluster-del-1': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' 'nikita-cluster-del-2': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' await registry.register 'clean', -> await @lxc.cluster.delete diff --git a/packages/lxd/test/cluster/index.coffee b/packages/lxd/test/cluster/index.coffee index 2b2b1f894..407c587ef 100644 --- a/packages/lxd/test/cluster/index.coffee +++ b/packages/lxd/test/cluster/index.coffee @@ -1,12 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) -path = require('path') - -return unless tags.lxd +import path from 'node:path' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) +__dirname = new URL( '.', import.meta.url).pathname describe 'lxc.cluster', -> + return unless test.tags.lxd describe 'validation', -> @@ -64,7 +65,7 @@ describe 'lxc.cluster', -> 'dns.domain': 'nikita.local' containers: 'nikita-cluster-1': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" disk: nikitadir: source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') @@ -126,7 +127,7 @@ describe 'lxc.cluster', -> 'dns.domain': 'nikita.local' containers: 'nikita-cluster-2': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: # Overwrite the default DHCP Nat enabled interface name: 'eth0', nictype: 'bridged', parent: 'nktlxdprv' @@ -149,7 +150,7 @@ describe 'lxc.cluster', -> finally await @clean() - return unless tags.lxd_vm + return unless test.tags.lxd_vm they 'init properties with vm', ({ssh}) -> @timeout -1 diff --git a/packages/lxd/test/cluster/index.lifecycle.coffee b/packages/lxd/test/cluster/index.lifecycle.coffee index ba1e9d75c..b4e11a0de 100644 --- a/packages/lxd/test/cluster/index.lifecycle.coffee +++ b/packages/lxd/test/cluster/index.lifecycle.coffee @@ -1,12 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) -path = require('path') - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.cluster.lifecycle', -> + return unless test.tags.lxd they 'prevision and provision', ({ssh}) -> @timeout -1 @@ -17,7 +16,7 @@ describe 'lxc.cluster.lifecycle', -> cluster = containers: 'nikita-cluster-lifecycle-1': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" await registry.register 'clean', -> await @lxc.cluster.delete cluster, force: true await registry.register 'test', -> diff --git a/packages/lxd/test/cluster/stop.coffee b/packages/lxd/test/cluster/stop.coffee index bfdb9b091..e499ae151 100644 --- a/packages/lxd/test/cluster/stop.coffee +++ b/packages/lxd/test/cluster/stop.coffee @@ -1,12 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) -path = require('path') - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.cluster.stop', -> + return unless test.tags.lxd 
they 'stop a running cluster', ({ssh}) -> @timeout -1 # yum install take a lot of time @@ -18,10 +17,10 @@ describe 'lxc.cluster.stop', -> 'ipv6.address': 'none' containers: 'nikita-cluster-stop-1': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' 'nikita-cluster-stop-2': - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" nic: eth0: name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' nikita $ssh: ssh diff --git a/packages/lxd/test/config/device/delete.coffee b/packages/lxd/test/config/device/delete.coffee index 50138daad..e9741beac 100644 --- a/packages/lxd/test/config/device/delete.coffee +++ b/packages/lxd/test/config/device/delete.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.config.device.delete', -> + return unless test.tags.lxd they 'Fail if the device does not exist', ({ssh}) -> -> nikita @@ -15,7 +15,7 @@ describe 'lxc.config.device.delete', -> container: 'nikita-config-device-delete-1' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-delete-1' {$status} = await @lxc.config.device.delete device: 'nondevice' @@ -30,7 +30,7 @@ describe 'lxc.config.device.delete', -> container: 'nikita-config-device-delete-2' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-delete-2' @lxc.config.device container: 'nikita-config-device-delete-2' diff --git a/packages/lxd/test/config/device/exists.coffee b/packages/lxd/test/config/device/exists.coffee index e424779ba..3c5f5afbc 100644 --- a/packages/lxd/test/config/device/exists.coffee +++ b/packages/lxd/test/config/device/exists.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.config.device.exists', -> + return unless test.tags.lxd they 'Device does not exist', ({ssh}) -> nikita @@ -15,7 +15,7 @@ describe 'lxc.config.device.exists', -> container: 'nikita-config-device-exists-1' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-exists-1' {exists} = await @lxc.config.device.exists container: 'nikita-config-device-exists-1' @@ -30,7 +30,7 @@ describe 'lxc.config.device.exists', -> container: 'nikita-config-device-exists-2' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-exists-2' @lxc.config.device container: 'nikita-config-device-exists-2' diff --git a/packages/lxd/test/config/device/index.coffee b/packages/lxd/test/config/device/index.coffee index b98cd86f3..741a97a70 100644 --- a/packages/lxd/test/config/device/index.coffee +++ b/packages/lxd/test/config/device/index.coffee @@ -1,12 +1,11 @@ -nikita = require '@nikitajs/core/lib' -assert = require 'assert' -{config, images, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless 
tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.config.device', -> + return unless test.tags.lxd describe 'schema', -> @@ -17,7 +16,7 @@ describe 'lxc.config.device', -> force: true .lxc.init container: 'nikita-config-device-1' - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" .lxc.config.device container: 'nikita-config-device-1' device: 'test' @@ -34,7 +33,7 @@ describe 'lxc.config.device', -> force: true .lxc.init container: 'nikita-config-device-2' - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" .lxc.config.device container: 'nikita-config-device-2' device: 'test' @@ -51,7 +50,7 @@ describe 'lxc.config.device', -> force: true .lxc.init container: 'nikita-config-device-3' - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" .lxc.config.device container: 'nikita-config-device-3' device: 'test' @@ -72,7 +71,7 @@ describe 'lxc.config.device', -> container: 'nikita-config-device-4' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-4' {$status} = await @lxc.config.device container: 'nikita-config-device-4' @@ -89,7 +88,7 @@ describe 'lxc.config.device', -> container: 'nikita-config-device-4' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-4' {$status} = await @lxc.config.device container: 'nikita-config-device-4' @@ -111,7 +110,7 @@ describe 'lxc.config.device', -> container: 'nikita-config-device-5' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-5' @lxc.config.device container: 'nikita-config-device-5' @@ -137,7 +136,7 @@ describe 'lxc.config.device', -> container: 'nikita-config-device-5' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-5' @lxc.config.device container: 'nikita-config-device-5' @@ -165,7 +164,7 @@ describe 'lxc.config.device', -> container: 'nikita-config-device-7' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-7' @lxc.config.device container: 'nikita-config-device-7' @@ -190,7 +189,7 @@ describe 'lxc.config.device', -> container: 'nikita-config-device-8' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-device-8' @lxc.config.device container: 'nikita-config-device-8' diff --git a/packages/lxd/test/config/device/show.coffee b/packages/lxd/test/config/device/show.coffee index 7c25bf741..2a8d3aa03 100644 --- a/packages/lxd/test/config/device/show.coffee +++ b/packages/lxd/test/config/device/show.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.config.device.show', -> + return unless test.tags.lxd they 'config output', ({ssh}) -> nikita @@ -15,7 +15,7 @@ describe 'lxc.config.device.show', -> container: 'nikita-config-show-1' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 
'nikita-config-show-1' @lxc.config.device container: 'nikita-config-show-1' diff --git a/packages/lxd/test/config/set.coffee b/packages/lxd/test/config/set.coffee index 89cfffa4b..4e8271573 100644 --- a/packages/lxd/test/config/set.coffee +++ b/packages/lxd/test/config/set.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.config.set', -> + return unless test.tags.lxd they 'Set multiple keys', ({ssh}) -> nikita @@ -15,7 +15,7 @@ describe 'lxc.config.set', -> container: 'nikita-config-set-1' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-set-1' {$status} = await @lxc.config.set container: 'nikita-config-set-1' @@ -42,7 +42,7 @@ describe 'lxc.config.set', -> container: 'nikita-config-set-2' force: true @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-config-set-2' {$status} = await @lxc.config.set container: 'nikita-config-set-2' diff --git a/packages/lxd/test/delete.coffee b/packages/lxd/test/delete.coffee index a98c9080a..0048e1e14 100644 --- a/packages/lxd/test/delete.coffee +++ b/packages/lxd/test/delete.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.delete', -> + return unless test.tags.lxd they 'Delete a container', ({ssh}) -> nikita $ssh: ssh , -> await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-delete-1' await @lxc.stop container: 'nikita-delete-1' @@ -28,7 +28,7 @@ describe 'lxc.delete', -> $ssh: ssh , -> await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-delete-2' start: true {$status} = await @lxc.delete diff --git a/packages/lxd/test/exec.coffee b/packages/lxd/test/exec.coffee index 37d7ad6e7..b20c76e87 100644 --- a/packages/lxd/test/exec.coffee +++ b/packages/lxd/test/exec.coffee @@ -1,12 +1,12 @@ -nikita = require '@nikitajs/core/lib' -utils = require '@nikitajs/core/lib/utils' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import utils from '@nikitajs/core/utils' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.exec', -> + return unless test.tags.lxd describe 'schema', -> @@ -27,7 +27,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-1' start: true {$status, stdout} = await @lxc.exec @@ -49,7 +49,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-2' start: true {stdout} = await @lxc.exec @@ -67,7 +67,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-3', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + 
image: "images:#{test.images.alpine}" container: 'nikita-exec-3' start: true await @lxc.exec @@ -92,7 +92,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-4', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-4' start: true @lxc.exec @@ -114,7 +114,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-5', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-5' start: true {$status, code} = await @lxc.exec @@ -138,7 +138,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-6', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-6' start: true {stdout} = await @lxc.exec @@ -163,7 +163,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-7', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-7' start: true @lxc.exec @@ -187,7 +187,7 @@ describe 'lxc.exec', -> @lxc.delete 'nikita-exec-8', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exec-8' start: true {stdout} = await @lxc.exec diff --git a/packages/lxd/test/exists.coffee b/packages/lxd/test/exists.coffee index c83caa8e0..a514fe61a 100644 --- a/packages/lxd/test/exists.coffee +++ b/packages/lxd/test/exists.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.exists', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.exists 'nikita-exists-1', ({config}) -> @@ -19,7 +19,7 @@ describe 'lxc.exists', -> @lxc.delete 'nikita-exists-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-exists-2' await @lxc.exists 'nikita-exists-2' .should.finally.match exists: true diff --git a/packages/lxd/test/file/exists.coffee b/packages/lxd/test/file/exists.coffee index 0923248fc..3bc9db8da 100644 --- a/packages/lxd/test/file/exists.coffee +++ b/packages/lxd/test/file/exists.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.file.exists', -> + return unless test.tags.lxd they 'when present', ({ssh}) -> nikita @@ -15,7 +15,7 @@ describe 'lxc.file.exists', -> @lxc.delete 'nikita-file-exists-1', force: true await @clean() @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-exists-1' start: true @execute @@ -34,7 +34,7 @@ describe 'lxc.file.exists', -> @lxc.delete 'nikita-file-exists-2', force: true await @clean() @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-exists-2' start: true {exists} = await @lxc.file.exists @@ -51,7 +51,7 @@ describe 'lxc.file.exists', -> await @lxc.delete 'nikita-file-exists-3', force: true registry.register 
'test', -> await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-exists-3' start: true # check is exists is true diff --git a/packages/lxd/test/file/pull.coffee b/packages/lxd/test/file/pull.coffee index ef362d710..047a080f7 100644 --- a/packages/lxd/test/file/pull.coffee +++ b/packages/lxd/test/file/pull.coffee @@ -1,7 +1,8 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) ## Todo @@ -11,92 +12,90 @@ they = require('mocha-they')(config) ## Tests describe 'lxc.file.pull', -> - - describe 'usage', -> - return unless tags.lxd + return unless test.tags.lxd - they 'require openssl', ({ssh}) -> - nikita + they 'require openssl', ({ssh}) -> + nikita + $ssh: ssh + , ({registry}) -> + registry.register 'clean', -> + @lxc.delete 'nikita-file-pull-1', force: true + registry.register 'test', -> + await @lxc.init + image: "images:#{test.images.alpine}" + container: 'nikita-file-pull-1' + start: true + await @lxc.start 'nikita-file-pull-1' + # pulling file from container + await @lxc.file.pull + container: 'nikita-file-pull-1' + source: "/root/file.sh" + target: "#{tmpdir}" + .should.be.rejectedWith + code: 'NIKITA_LXD_FILE_PULL_MISSING_OPENSSL' + try + await @clean() + await @test() + catch err + await @clean() + finally + await @clean() + + they 'should pull a file from a remote server', ({ssh}) -> + @timeout -1 + nikita $ssh: ssh - , ({registry}) -> - registry.register 'clean', -> - @lxc.delete 'nikita-file-pull-1', force: true - registry.register 'test', -> + $tmpdir: true + , ({metadata: {tmpdir}, registry}) -> + await registry.register 'clean', -> + await @lxc.delete + container: 'nikita-file-pull-2' + force: true + await @lxc.network.delete + network: 'nktlxdpub' + await registry.register 'test', -> + # creating network + await @lxc.network + network: 'nktlxdpub' + properties: + 'ipv4.address': '10.10.40.1/24' + 'ipv4.nat': true + 'ipv6.address': 'none' + # creating a container await @lxc.init - image: "images:#{images.alpine}" - container: 'nikita-file-pull-1' - start: true - await @lxc.start 'nikita-file-pull-1' + image: "images:#{test.images.alpine}" + container: 'nikita-file-pull-2' + nic: + eth0: + name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' + ssh: enabled: true + start: true + # attaching network + await @lxc.network.attach + container: 'nikita-file-pull-2' + network: 'nktlxdpub' + # adding openssl for file pull + await @lxc.exec + $retry: 100 + $wait: 200 # Wait for network to be ready + container: 'nikita-file-pull-2' + command: 'apk add openssl' + await @lxc.exec + container: 'nikita-file-pull-2' + command: "touch file.sh && echo 'hello' > file.sh" # pulling file from container await @lxc.file.pull - container: 'nikita-file-pull-1' + container: 'nikita-file-pull-2' source: "/root/file.sh" - target: "#{tmpdir}" - .should.be.rejectedWith - code: 'NIKITA_LXD_FILE_PULL_MISSING_OPENSSL' + target: "#{tmpdir}/" + # check if file exists in temp directory + {exists} = await @fs.base.exists + target: "#{tmpdir}/file.sh" + exists.should.be.eql true try - await @clean() - await @test() - catch err - await @clean() - finally - await @clean() - - they 'should pull a file from a remote server', ({ssh}) -> - @timeout -1 - nikita - $ssh: ssh - $tmpdir: true - , ({metadata: {tmpdir}, registry}) -> - await registry.register 
'clean', -> - await @lxc.delete - container: 'nikita-file-pull-2' - force: true - await @lxc.network.delete - network: 'nktlxdpub' - await registry.register 'test', -> - # creating network - await @lxc.network - network: 'nktlxdpub' - properties: - 'ipv4.address': '10.10.40.1/24' - 'ipv4.nat': true - 'ipv6.address': 'none' - # creating a container - await @lxc.init - image: "images:#{images.alpine}" - container: 'nikita-file-pull-2' - nic: - eth0: - name: 'eth0', nictype: 'bridged', parent: 'nktlxdpub' - ssh: enabled: true - start: true - # attaching network - await @lxc.network.attach - container: 'nikita-file-pull-2' - network: 'nktlxdpub' - # adding openssl for file pull - await @lxc.exec - $retry: 100 - $wait: 200 # Wait for network to be ready - container: 'nikita-file-pull-2' - command: 'apk add openssl' - await @lxc.exec - container: 'nikita-file-pull-2' - command: "touch file.sh && echo 'hello' > file.sh" - # pulling file from container - await @lxc.file.pull - container: 'nikita-file-pull-2' - source: "/root/file.sh" - target: "#{tmpdir}/" - # check if file exists in temp directory - {exists} = await @fs.base.exists - target: "#{tmpdir}/file.sh" - exists.should.be.eql true - try - await @clean() - await @test() - catch err - await @clean() - finally - await @clean() + await @clean() + await @test() + catch err + await @clean() + finally + await @clean() diff --git a/packages/lxd/test/file/push.coffee b/packages/lxd/test/file/push.coffee index 85ab10a87..de4ae6c3f 100644 --- a/packages/lxd/test/file/push.coffee +++ b/packages/lxd/test/file/push.coffee @@ -1,12 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.file.push', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'mode symbolic', -> nikita.lxc.file.push @@ -27,7 +28,7 @@ describe 'lxc.file.push', -> config.mode.should.eql 0o0700 describe 'usage', -> - return unless tags.lxd + return unless test.tags.lxd they 'require openssl', ({ssh}) -> nikita @@ -38,7 +39,7 @@ describe 'lxc.file.push', -> await @lxc.delete 'nikita-file-push-1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-push-1' start: true await @file.touch @@ -72,7 +73,7 @@ describe 'lxc.file.push', -> 'ipv6.address': 'none' # creating a container await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-push-2' nic: eth0: @@ -118,7 +119,7 @@ describe 'lxc.file.push', -> await @lxc.delete 'nikita-file-push-3', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-push-3' start: true await @lxc.exec @@ -140,7 +141,7 @@ describe 'lxc.file.push', -> await @clean() describe 'content', -> - return unless tags.lxd + return unless test.tags.lxd they 'a new file', ({ssh}) -> nikita @@ -150,7 +151,7 @@ describe 'lxc.file.push', -> await @lxc.delete 'nikita-file-push-4', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-push-4' start: true await @lxc.exec @@ -176,7 +177,7 @@ describe 'lxc.file.push', -> await @lxc.delete 'nikita-file-push-5', force: true await @clean() await @lxc.init - 
image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-push-5' start: true await @lxc.exec @@ -195,7 +196,7 @@ describe 'lxc.file.push', -> await @clean() describe 'mode', -> - return unless tags.lxd + return unless test.tags.lxd they 'absolute mode', ({ssh}) -> nikita @@ -205,7 +206,7 @@ describe 'lxc.file.push', -> await @lxc.delete 'nikita-file-push-6', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-push-6' start: true await @lxc.exec diff --git a/packages/lxd/test/file/read.coffee b/packages/lxd/test/file/read.coffee index 03b8f9604..eb5950426 100644 --- a/packages/lxd/test/file/read.coffee +++ b/packages/lxd/test/file/read.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.file.read', -> + return unless test.tags.lxd they 'file with content', ({ssh}) -> nikita @@ -15,7 +15,7 @@ describe 'lxc.file.read', -> @lxc.delete 'nikita-file-read-1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-read-1' start: true await @lxc.exec @@ -36,14 +36,13 @@ describe 'lxc.file.read', -> @lxc.delete 'nikita-file-read-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-read-2' start: true await @lxc.exec command: "touch /root/a_file" container: 'nikita-file-read-2' {data} = await @lxc.file.read - $debug: true container: 'nikita-file-read-2' target: '/root/a_file' data.should.eql '' @@ -57,7 +56,7 @@ describe 'lxc.file.read', -> @lxc.delete 'nikita-file-read-3', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-file-read-3' start: true await @lxc.exec diff --git a/packages/lxd/test/goodies/prlimit.coffee b/packages/lxd/test/goodies/prlimit.coffee index 5e4509f08..7d1992863 100644 --- a/packages/lxd/test/goodies/prlimit.coffee +++ b/packages/lxd/test/goodies/prlimit.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd_prlimit +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.goodie.prlimit', -> + return unless test.tags.lxd_prlimit they 'stdout', ({ssh}) -> nikita @@ -15,7 +15,7 @@ describe 'lxc.goodie.prlimit', -> container: 'nikita-goodies-prlimit-1' force: true await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-goodies-prlimit-1' start: true await @lxc.goodies.prlimit diff --git a/packages/lxd/test/info.coffee b/packages/lxd/test/info.coffee index 566842f80..32cfc5dd6 100644 --- a/packages/lxd/test/info.coffee +++ b/packages/lxd/test/info.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 
'lxc.info', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.info 'nikita-info-1', ({config}) -> @@ -19,7 +19,7 @@ describe 'lxc.info', -> @lxc.delete 'nikita-info-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-info-2' await @lxc.info 'nikita-info-2' .should.finally.match data: name: 'nikita-info-2' diff --git a/packages/lxd/test/init.coffee b/packages/lxd/test/init.coffee index 3bb10d335..e421b4daf 100644 --- a/packages/lxd/test/init.coffee +++ b/packages/lxd/test/init.coffee @@ -1,18 +1,18 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.init', -> + return unless test.tags.lxd describe 'schema', -> it 'Container name is between 1 and 63 characters long', -> nikita .lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: "very-long-long-long-long-long-long-long-long-long-long-long-long-long-name" .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' @@ -27,7 +27,7 @@ describe 'lxc.init', -> it 'Container name accepts letters, numbers and dashes from the ASCII table', -> nikita .lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'my_name' .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' @@ -41,7 +41,7 @@ describe 'lxc.init', -> it 'Container name must not start with a digit', -> nikita.lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: '1u' .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' @@ -55,7 +55,7 @@ describe 'lxc.init', -> it 'Container name must not start with a dash', -> nikita.lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: '-u1' .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' @@ -70,7 +70,7 @@ describe 'lxc.init', -> it 'Container name is not end with a dash', -> nikita .lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'u1-' .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' @@ -92,7 +92,7 @@ describe 'lxc.init', -> @lxc.delete 'nikita-init-1', force: true await @clean() {$status} = await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-1' $status.should.be.true() await @clean() @@ -105,7 +105,7 @@ describe 'lxc.init', -> @lxc.delete 'nikita-init-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-2' start: true {$status} = await @lxc.running @@ -121,7 +121,7 @@ describe 'lxc.init', -> @lxc.delete 'nikita-init-3', force: true await @clean() {$status} = await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-3' $status.should.be.true() await @clean() @@ -134,16 +134,16 @@ describe 'lxc.init', -> @lxc.delete 'nikita-init-4', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-4' {$status} = await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-4' 
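The `lxc.init` schema tests above encode LXD's container naming rules: between 1 and 63 characters, ASCII letters, digits and dashes only, starting with a letter and never ending with a dash. As a rough standalone sketch of those rules, here is a small JavaScript check; the regular expression is an illustration written for this note, not the one used by the action's schema.

```js
// Illustrative check of the container-name rules exercised by the schema tests.
// The regex below is an approximation, not the schema's own definition.
const isValidContainerName = (name) =>
  /^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$/.test(name);

console.info(isValidContainerName("nikita-init-1")); // true
console.info(isValidContainerName("my_name"));       // false: underscore not allowed
console.info(isValidContainerName("1u"));            // false: starts with a digit
console.info(isValidContainerName("-u1"));           // false: starts with a dash
console.info(isValidContainerName("u1-"));           // false: ends with a dash
```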
$status.should.be.false() await @clean() describe 'vm', -> - return unless tags.lxd_vm + return unless test.tags.lxd_vm they 'Init new VM', ({ssh}) -> nikita @@ -153,7 +153,7 @@ describe 'lxc.init', -> @lxc.delete 'nikita-init-vm1', force: true await @clean() {$status} = await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-vm1' vm: true $status.should.be.true() @@ -167,11 +167,11 @@ describe 'lxc.init', -> @lxc.delete 'nikita-init-vm2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-vm2' vm: true {$status} = await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-init-vm2' vm: true $status.should.be.false() diff --git a/packages/lxd/test/list.coffee b/packages/lxd/test/list.coffee index 9ed4af235..b066077e2 100644 --- a/packages/lxd/test/list.coffee +++ b/packages/lxd/test/list.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.list', -> + return unless test.tags.lxd they 'list all instances', ({ssh}) -> nikita @@ -16,18 +16,18 @@ describe 'lxc.list', -> @lxc.delete 'nikita-list-vm1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-list-c1' await @lxc.init - $if: tags.lxd_vm - image: "images:#{images.alpine}" + $if: test.tags.lxd_vm + image: "images:#{test.images.alpine}" container: 'nikita-list-vm1' vm: true await @wait time: 200 {$status, list} = await @lxc.list() $status.should.be.true() list.should.containEql 'nikita-list-c1' - list.should.containEql 'nikita-list-vm1' if tags.lxd_vm + list.should.containEql 'nikita-list-vm1' if test.tags.lxd_vm await @clean() describe 'option `filter`', -> @@ -41,18 +41,18 @@ describe 'lxc.list', -> @lxc.delete 'nikita-list-vm1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-list-c1' await @lxc.init - $if: tags.lxd_vm - image: "images:#{images.alpine}" + $if: test.tags.lxd_vm + image: "images:#{test.images.alpine}" container: 'nikita-list-vm1' vm: true {$status, list} = await @lxc.list filter: 'containers' $status.should.be.true() list.should.containEql 'nikita-list-c1' - list.should.not.containEql 'nikita-list-vm1' if tags.lxd_vm + list.should.not.containEql 'nikita-list-vm1' if test.tags.lxd_vm await @clean() they 'when `virtual-machines`, only display VMs', ({ssh}) -> @@ -64,16 +64,16 @@ describe 'lxc.list', -> @lxc.delete 'nikita-list-vm1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-list-c1' await @lxc.init - $if: tags.lxd_vm - image: "images:#{images.alpine}" + $if: test.tags.lxd_vm + image: "images:#{test.images.alpine}" container: 'nikita-list-vm1' vm: true {$status, list} = await @lxc.list filter: 'virtual-machines' $status.should.be.true() list.should.not.containEql 'nikita-list-c1' - list.should.containEql 'nikita-list-vm1' if tags.lxd_vm + list.should.containEql 'nikita-list-vm1' if test.tags.lxd_vm await @clean() diff --git a/packages/lxd/test/network/attach.coffee 
b/packages/lxd/test/network/attach.coffee index ce9652c29..885d5d61a 100644 --- a/packages/lxd/test/network/attach.coffee +++ b/packages/lxd/test/network/attach.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.network.attach', -> + return unless test.tags.lxd they 'Attach a network to a container', ({ssh}) -> nikita @@ -20,7 +20,7 @@ describe 'lxc.network.attach', -> try await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'u0' await @lxc.network network: "nkt-attach-1" @@ -44,7 +44,7 @@ describe 'lxc.network.attach', -> await @clean() try await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'u0' await @lxc.network network: "nkt-attach-2" diff --git a/packages/lxd/test/network/delete.coffee b/packages/lxd/test/network/delete.coffee index 45af99ff5..0ff2fa590 100644 --- a/packages/lxd/test/network/delete.coffee +++ b/packages/lxd/test/network/delete.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.network.delete', -> + return unless test.tags.lxd they 'Delete a network', ({ssh}) -> nikita diff --git a/packages/lxd/test/network/detach.coffee b/packages/lxd/test/network/detach.coffee index 03eb6d00a..be2015f0e 100644 --- a/packages/lxd/test/network/detach.coffee +++ b/packages/lxd/test/network/detach.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.network.detach', -> + return unless test.tags.lxd they 'Detach a network from a container', ({ssh}) -> nikita @@ -20,7 +20,7 @@ describe 'lxc.network.detach', -> try await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'u0' await @lxc.network network: "nkt-detach-1" @@ -47,7 +47,7 @@ describe 'lxc.network.detach', -> try await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'u0' await @lxc.network network: "nkt-detach-2" diff --git a/packages/lxd/test/network/index.coffee b/packages/lxd/test/network/index.coffee index adfcaae0d..5b4923c25 100644 --- a/packages/lxd/test/network/index.coffee +++ b/packages/lxd/test/network/index.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.network.create', -> + return unless test.tags.lxd they 'schema dns.domain valid', ({ssh}) -> nikita diff --git a/packages/lxd/test/network/list.coffee b/packages/lxd/test/network/list.coffee index 5f7db93f5..ba345c0ef 100644 --- a/packages/lxd/test/network/list.coffee +++ 
b/packages/lxd/test/network/list.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.network.list', -> + return unless test.tags.lxd they 'list all networks', ({ssh}) -> nikita diff --git a/packages/lxd/test/query.coffee b/packages/lxd/test/query.coffee index 05d148363..944d90df5 100644 --- a/packages/lxd/test/query.coffee +++ b/packages/lxd/test/query.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, images} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.query', -> + return unless test.tags.lxd describe 'base options', -> @@ -72,7 +72,7 @@ describe 'lxc.query', -> @lxc.delete 'nikita-query-1', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-query-1' start: true {$status, data} = await @lxc.query diff --git a/packages/lxd/test/resources.coffee b/packages/lxd/test/resources.coffee index a857f7073..86cefc289 100644 --- a/packages/lxd/test/resources.coffee +++ b/packages/lxd/test/resources.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.resources', -> + return unless test.tags.lxd they "check the cpu and the memory", ({ssh}) -> nikita diff --git a/packages/lxd/test/running.coffee b/packages/lxd/test/running.coffee index b12c9a708..7b72aa909 100644 --- a/packages/lxd/test/running.coffee +++ b/packages/lxd/test/running.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.running', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.running 'nikita-running-1', ({config}) -> @@ -19,7 +19,7 @@ describe 'lxc.running', -> @lxc.delete 'nikita-running-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-running-2' start: true {$status} = await @lxc.running @@ -35,7 +35,7 @@ describe 'lxc.running', -> @lxc.delete 'nikita-running-3', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-running-3' {$status} = await @lxc.running container: 'nikita-running-3' diff --git a/packages/lxd/test/start.coffee b/packages/lxd/test/start.coffee index 42905e021..ab4be6e08 100644 --- a/packages/lxd/test/start.coffee +++ b/packages/lxd/test/start.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 
'mocha-they' +they = mochaThey(test.config) describe 'lxc.start', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.start 'nikita-start-1', ({config}) -> @@ -19,7 +19,7 @@ describe 'lxc.start', -> @lxc.delete 'nikita-start-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-start-2' {$status} = await @lxc.start container: 'nikita-start-2' @@ -34,7 +34,7 @@ describe 'lxc.start', -> @lxc.delete 'nikita-start-3', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-start-3' await @lxc.start container: 'nikita-start-3' diff --git a/packages/lxd/test/state.coffee b/packages/lxd/test/state.coffee index 624fe604f..9e433612f 100644 --- a/packages/lxd/test/state.coffee +++ b/packages/lxd/test/state.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.state', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.state 'nikita-state-1', ({config}) -> @@ -19,7 +19,7 @@ describe 'lxc.state', -> @lxc.delete 'nikita-state-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-state-2' {$status, config} = await @lxc.state container: 'nikita-state-2' diff --git a/packages/lxd/test/stop.coffee b/packages/lxd/test/stop.coffee index 749327104..cc338a9b9 100644 --- a/packages/lxd/test/stop.coffee +++ b/packages/lxd/test/stop.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require './test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.stop', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.stop 'nikita-stop-1', ({config}) -> @@ -19,7 +19,7 @@ describe 'lxc.stop', -> @lxc.delete 'nikita-stop-2', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-stop-2' {$status} = await @lxc.stop container: 'nikita-stop-2' @@ -34,7 +34,7 @@ describe 'lxc.stop', -> @lxc.delete 'nikita-stop-3', force: true await @clean() await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-stop-3' start: true {$status} = await @lxc.stop diff --git a/packages/lxd/test/storage/delete.coffee b/packages/lxd/test/storage/delete.coffee index c07df57c6..baa8c9ad2 100644 --- a/packages/lxd/test/storage/delete.coffee +++ b/packages/lxd/test/storage/delete.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.delete', -> + return unless test.tags.lxd they 'Delete a storage', ({ssh}) -> nikita diff --git a/packages/lxd/test/storage/exists.coffee b/packages/lxd/test/storage/exists.coffee index 
120de332b..3be6e2d57 100644 --- a/packages/lxd/test/storage/exists.coffee +++ b/packages/lxd/test/storage/exists.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.exists', -> + return unless test.tags.lxd they 'argument is a string', ({ssh}) -> await nikita.lxc.storage.exists 'nikita-storage-exists-1', ({config}) -> diff --git a/packages/lxd/test/storage/index.coffee b/packages/lxd/test/storage/index.coffee index 4451e8bbe..33b15e3dc 100644 --- a/packages/lxd/test/storage/index.coffee +++ b/packages/lxd/test/storage/index.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage', -> + return unless test.tags.lxd they 'Create a storage', ({ssh}) -> nikita diff --git a/packages/lxd/test/storage/list.coffee b/packages/lxd/test/storage/list.coffee index 0e215bd0a..eb6f6f8be 100644 --- a/packages/lxd/test/storage/list.coffee +++ b/packages/lxd/test/storage/list.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.list', -> + return unless test.tags.lxd they 'List storages', ({ssh}) -> nikita diff --git a/packages/lxd/test/storage/volume/attach.coffee b/packages/lxd/test/storage/volume/attach.coffee index 47f9339db..1c9c6c746 100644 --- a/packages/lxd/test/storage/volume/attach.coffee +++ b/packages/lxd/test/storage/volume/attach.coffee @@ -1,10 +1,10 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.volume.attach', -> + return unless test.tags.lxd describe 'attach', -> @@ -30,7 +30,7 @@ describe 'lxc.storage.volume.attach', -> pool: 'nikita-storage-attach-1' # Create instance await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-container-attach-1' # Attach volume to instance {$status} = await @lxc.storage.volume.attach @@ -47,7 +47,7 @@ describe 'lxc.storage.volume.attach', -> data.devices.should.containEql {'osd': {type: 'disk', source: 'nikita-volume-attach-1', pool: 'nikita-storage-attach-1', path: '/osd/'}} await @clean() - return unless tags.lxd_vm + return unless test.tags.lxd_vm they 'should attach a block volume on a vm', ({ssh}) -> nikita @@ -72,7 +72,7 @@ describe 'lxc.storage.volume.attach', -> content: 'block' # Create instance await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-container-attach-2' vm:true # Attach volume to instance @@ -113,7 +113,7 @@ describe 'lxc.storage.volume.attach', -> pool: 'nikita-storage-attach-1' # Create instance await @lxc.init - image: 
"images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-container-attach-1' # Attach volume to instance await @lxc.storage.volume.attach @@ -124,7 +124,7 @@ describe 'lxc.storage.volume.attach', -> .should.be.rejectedWith /^Missing requirement: Path is required for filesystem type volumes./ await @clean() - return unless tags.lxd_vm + return unless test.tags.lxd_vm they 'should attach a filesystem to a vm', ({ssh}) -> nikita @@ -148,7 +148,7 @@ describe 'lxc.storage.volume.attach', -> pool: 'nikita-storage-attach-2' # Create instance await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-container-attach-2' vm: true # Attach volume to instance @@ -184,7 +184,7 @@ describe 'lxc.storage.volume.attach', -> content: 'block' # Create instance await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-container-attach-3' # Attach volume to instance await @lxc.storage.volume.attach @@ -214,7 +214,7 @@ describe 'lxc.storage.volume.attach', -> name: 'nikita-storage-attach-4' driver: "zfs" await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-container-attach-4' # Attach volume to instance await @lxc.storage.volume.attach diff --git a/packages/lxd/test/storage/volume/delete.coffee b/packages/lxd/test/storage/volume/delete.coffee index fdb2e8c59..cf8b113dd 100644 --- a/packages/lxd/test/storage/volume/delete.coffee +++ b/packages/lxd/test/storage/volume/delete.coffee @@ -1,10 +1,10 @@ -nikita = require '@nikitajs/core/lib' -{config, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.volume.delete', -> + return unless test.tags.lxd they 'delete a volume', ({ssh}) -> nikita diff --git a/packages/lxd/test/storage/volume/get.coffee b/packages/lxd/test/storage/volume/get.coffee index bb14dbce7..d3221b72e 100644 --- a/packages/lxd/test/storage/volume/get.coffee +++ b/packages/lxd/test/storage/volume/get.coffee @@ -1,10 +1,10 @@ -nikita = require '@nikitajs/core/lib' -{config, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.volume.get', -> + return unless test.tags.lxd they 'get a volume', ({ssh}) -> nikita diff --git a/packages/lxd/test/storage/volume/index.coffee b/packages/lxd/test/storage/volume/index.coffee index 3e24bf304..c6503c722 100644 --- a/packages/lxd/test/storage/volume/index.coffee +++ b/packages/lxd/test/storage/volume/index.coffee @@ -1,10 +1,10 @@ -nikita = require '@nikitajs/core/lib' -{config, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.volume', -> + return unless test.tags.lxd describe 'volume creation', -> diff --git a/packages/lxd/test/storage/volume/list.coffee b/packages/lxd/test/storage/volume/list.coffee index 092173913..57d79dcd1 100644 --- a/packages/lxd/test/storage/volume/list.coffee +++ b/packages/lxd/test/storage/volume/list.coffee @@ -1,10 +1,10 @@ -nikita = require 
'@nikitajs/core/lib' -{config, tags} = require '../../test' -they = require('mocha-they')(config) - -return unless tags.lxd +import nikita from '@nikitajs/core' +import test from '../../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.storage.volume.list', -> + return unless test.tags.lxd they 'list all volumes in a pool', ({ssh}) -> nikita diff --git a/packages/lxd/test/test.coffee b/packages/lxd/test/test.coffee index c4d70ef69..ccc602447 100644 --- a/packages/lxd/test/test.coffee +++ b/packages/lxd/test/test.coffee @@ -1,45 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config - -# Cache container and vm images - -return unless config.tags.lxd -nikita = require '@nikitajs/core/lib' -they = require('mocha-they')(config.config) - -they 'cache container image to avoid timeout later', ({ssh}) -> - @timeout 0 - nikita - $ssh: ssh - .execute - command: "lxc image copy images:#{config.images.alpine} `lxc remote get-default`:" - -return unless config.tags.lxd_vm -they 'cache vm image to avoid timeout later', ({ssh}) -> - @timeout 0 - nikita - $ssh: ssh - .execute - command: "lxc image copy images:#{config.images.alpine} `lxc remote get-default`: --vm" - # It takes time to retrieve files from a VM image archive the first - # time after downloading. It is way faster for a container image, so - # we don't need it. 
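The converted `test/test.coffee` above swaps `require` for a dynamic `import()` and then re-exports `config.default`. A minimal JavaScript sketch of why that unwrapping step is needed; the `./config.js` fallback path is hypothetical, only the `NIKITA_TEST_MODULE` variable comes from the patch.

```js
// Hypothetical ./config.js: an ES module exporting the test configuration, e.g.
// export default { tags: { lxd: true } };

// import() resolves to a module namespace object, so the default export has to
// be read from its `.default` property before being re-exported, which is what
// `export default config.default` does at the end of the converted helper.
const mod = await import(process.env.NIKITA_TEST_MODULE || "./config.js");
export default mod.default;
```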
- .execute - command: """ - lxc info vm1 >/dev/null && exit 42 - echo "" | lxc init images:#{config.images.alpine} vm1 --vm - lxc rm -f vm1 - """ - code: [0, 42] +export default config.default diff --git a/packages/lxd/test/wait/ready.coffee b/packages/lxd/test/wait/ready.coffee index 209614991..c9be5d692 100644 --- a/packages/lxd/test/wait/ready.coffee +++ b/packages/lxd/test/wait/ready.coffee @@ -1,14 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{config, images, tags} = require '../../test' -they = require('mocha-they')(config) - +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'lxc.wait.ready', -> describe 'For containers', -> - - return unless tags.lxd + return unless test.tags.lxd they 'wait for the container to be ready', ({ssh}) -> nikita @@ -20,7 +19,7 @@ describe 'lxc.wait.ready', -> force: true await registry.register 'test', -> await @lxc.init - image: "images:#{images.alpine}" + image: "images:#{test.images.alpine}" container: 'nikita-wait-1' start: true {$status} = await @lxc.wait.ready 'nikita-wait-1' @@ -32,8 +31,7 @@ describe 'lxc.wait.ready', -> await @clean() describe 'For virtual machines', -> - - return unless tags.lxd_vm + return unless test.tags.lxd_vm they 'wait for the virtual machine to be ready', ({ssh}) -> @timeout -1 @@ -89,8 +87,6 @@ describe 'lxc.wait.ready', -> await @test() finally await @clean() - - return unless tags.lxd_vm they 'try to execute a command before booting', ({ssh}) -> @timeout -1 diff --git a/packages/network/README.md b/packages/network/README.md index 2c019f836..b7f3fcb38 100644 --- a/packages/network/README.md +++ b/packages/network/README.md @@ -2,3 +2,21 @@ # Nikita "network" package The "network" package provides Nikita actions for networking operations. 
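The usage example added to the README just below imports `@nikitajs/network/register` without the historical `/lib` segment. That shorter specifier is resolved by the `exports` map added to the package's `package.json` later in this patch; a minimal sketch of how those subpaths land in `lib/`:

```js
// Subpath exports declared in packages/network/package.json by this patch:
//   "./register": "./lib/register.js"
//   "./utils":    "./lib/utils/index.js"
//   "./*":        "./lib/*/index.js"
// Consumers therefore never spell out lib/ in their import specifiers:
import "@nikitajs/network/register";           // -> lib/register.js
import wait from "@nikitajs/network/tcp/wait"; // -> lib/tcp/wait/index.js

console.info(typeof wait.handler); // "function"
```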
+ +## Usage + +```js +import "@nikitajs/network/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.network.tcp.wait({ + server: [ + { host: 'localhost', port: 8080 }, + { host: 'localhost', port: 8081 }, + { host: 'localhost', port: 8082 }, + ], + quorum: true, + interval: 200, +}); +console.info("Network available on first connection attempt:", !$status); +``` diff --git a/packages/network/lib/http/index.js b/packages/network/lib/http/index.js index 1ebd8e8ff..57862867e 100644 --- a/packages/network/lib/http/index.js +++ b/packages/network/lib/http/index.js @@ -1,27 +1,28 @@ - - // Dependencies -const dedent = require('dedent'); -const url = require('url'); -const utils = require('../utils'); -const definitions = require('./schema.json'); -const esa = utils.string.escapeshellarg; +import dedent from "dedent"; +import utils from "@nikitajs/network/utils"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({config}) { +export default { + handler: async function ({ config }) { if (config.principal && !config.password) { - throw Error("Required Option: `password` is required if principal is provided"); + throw Error( + "Required Option: `password` is required if principal is provided" + ); } - if ((config.method === 'POST' || config.method === 'PUT') && !config.data) { - throw Error("Required Option: `data` is required with POST and PUT requests"); + if ((config.method === "POST" || config.method === "PUT") && !config.data) { + throw Error( + "Required Option: `data` is required with POST and PUT requests" + ); } - if ((config.data != null) && typeof config.data !== 'string') { - if (config.http_headers['Accept'] == null) { - config.http_headers['Accept'] = 'application/json'; + if (config.data != null && typeof config.data !== "string") { + if (config.http_headers["Accept"] == null) { + config.http_headers["Accept"] = "application/json"; } - if ( config.http_headers['Content-Type'] == null) { - config.http_headers['Content-Type'] = 'application/json'; + if (config.http_headers["Content-Type"] == null) { + config.http_headers["Content-Type"] = "application/json"; } config.data = JSON.stringify(config.data); } @@ -38,33 +39,53 @@ module.exports = { headers: {}, status_code: void 0, status_message: void 0, - type: void 0 + type: void 0, }; try { - const {stdout} = await this.execute({ + const { stdout } = await this.execute({ command: dedent` - ${!config.principal ? '' : ['echo', config.password, '|', 'kinit', config.principal, '>/dev/null'].join(' ')} + ${ + !config.principal + ? "" + : [ + "echo", + config.password, + "|", + "kinit", + config.principal, + ">/dev/null", + ].join(" ") + } command -v curl >/dev/null || exit 90 ${[ - 'curl', - config.timeout ? `--max-time '${Math.max(config.timeout / 1000)}'` : void 0, - '--include', // Include protocol headers in the output (H/F) - '--silent', // Dont print progression to stderr - config.fail ? '--fail' : void 0, - !config.cacert && config.url.startsWith('https:') ? '--insecure' : void 0, - config.cacert ? '--cacert #{config.cacert}' : void 0, - config.negotiate ? '--negotiate -u:' : void 0, - config.location ? '--location' : void 0, - ...Object.keys(config.http_headers).map(header => `--header ${esa(header+": "+config.http_headers[header])}`), - ...config.cookies.map( cookie => `--cookie ${esa(cookie)}`), + "curl", + config.timeout + ? 
`--max-time '${Math.max(config.timeout / 1000)}'` + : void 0, + "--include", // Include protocol headers in the output (H/F) + "--silent", // Dont print progression to stderr + config.fail ? "--fail" : void 0, + !config.cacert && config.url.startsWith("https:") + ? "--insecure" + : void 0, + config.cacert ? "--cacert #{config.cacert}" : void 0, + config.negotiate ? "--negotiate -u:" : void 0, + config.location ? "--location" : void 0, + ...Object.keys(config.http_headers).map( + (header) => + `--header ${esa(header + ": " + config.http_headers[header])}` + ), + ...config.cookies.map((cookie) => `--cookie ${esa(cookie)}`), config.target ? `-o ${config.target}` : void 0, config.proxy ? `-x ${config.proxy}` : void 0, - config.method !== 'GET' ? `-X ${config.method}` : void 0, - config.data ? `--data ${utils.string.escapeshellarg(config.data)}` : void 0, - `${config.url}` - ].join(' ')} + config.method !== "GET" ? `-X ${config.method}` : void 0, + config.data + ? `--data ${esa(config.data)}` + : void 0, + `${config.url}`, + ].join(" ")} `, - trap: true + trap: true, }); output.raw = stdout; let done_with_header = false; @@ -72,18 +93,19 @@ module.exports = { if (output.body.length === 0 && /^HTTP\/[\d.]+ \d+/.test(line)) { done_with_header = false; output.headers = {}; - const [http_version, status_code, ...status_message] = line.split(' '); + const [http_version, status_code, ...status_message] = + line.split(" "); output.http_version = http_version.substr(5); output.status_code = parseInt(status_code, 10); - output.status_message = status_message.join(' '); + output.status_message = status_message.join(" "); continue; - } else if (line === '') { + } else if (line === "") { done_with_header = true; continue; } if (!done_with_header) { - const [name, ...value] = line.split(':'); - output.headers[name.trim()] = value.join(':').trim(); + const [name, ...value] = line.split(":"); + output.headers[name.trim()] = value.join(":").trim(); } else { output.body.push(line); } @@ -91,34 +113,40 @@ module.exports = { } catch (err) { const code = utils.curl.error(err.exit_code); if (code) { - throw utils.error(code, [`the curl command exited with code \`${err.exit_code}\`.`]); + throw utils.error(code, [ + `the curl command exited with code \`${err.exit_code}\`.`, + ]); } else if (err.exit_code === 90) { - throw utils.error('NIKITA_NETWORK_DOWNLOAD_CURL_REQUIRED', ['the `curl` command could not be found', 'and is required to perform HTTP requests,', 'make sure it is available in your `$PATH`.']); + throw utils.error("NIKITA_NETWORK_DOWNLOAD_CURL_REQUIRED", [ + "the `curl` command could not be found", + "and is required to perform HTTP requests,", + "make sure it is available in your `$PATH`.", + ]); } else { throw err; } } await this.fs.chmod({ $if: config.target && config.mode, - mode: config.mode + mode: config.mode, }); await this.fs.chown({ - $if: config.target && (config.uid != null) || (config.gid != null), + $if: (config.target && config.uid != null) || config.gid != null, target: config.target, uid: config.uid, - gid: config.gid + gid: config.gid, }); - if (/^application\/json(;|$)/.test(output.headers['Content-Type'])) { - output.type = 'json'; + if (/^application\/json(;|$)/.test(output.headers["Content-Type"])) { + output.type = "json"; } - output.body = output.body.join(''); + output.body = output.body.join(""); switch (output.type) { - case 'json': + case "json": output.data = JSON.parse(output.body); } return output; }, metadata: { - definitions: definitions - } + definitions: definitions, + 
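For reference, the handler above shells out to `curl`, parses the response headers, and decodes JSON bodies into `data`. A minimal sketch of calling it through the registered `network.http` action, assuming the packages from this patch are installed; the URL is illustrative.

```js
import "@nikitajs/network/register";
import nikita from "@nikitajs/core";

// The handler builds and runs a curl command, then exposes the parsed result.
const { status_code, type, data, body } = await nikita.network.http({
  url: "https://example.com/api", // illustrative endpoint
  method: "GET",
});
console.info(status_code, type); // e.g. 200 'json'
console.info(type === "json" ? data : body);
```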
}, }; diff --git a/packages/network/lib/http/schema.json b/packages/network/lib/http/schema.json index 88b65851f..af497875e 100644 --- a/packages/network/lib/http/schema.json +++ b/packages/network/lib/http/schema.json @@ -25,7 +25,7 @@ "description": "Fail silently (no output at all) on HTTP errors." }, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid", + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid", "description": "Group name or id who owns the target file; only apply if `target` is\nprovided." }, "http_headers": { @@ -47,7 +47,7 @@ "description": "Specify request command (HTTP method) to use." }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode", + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode", "description": "Permissions of the target. If specified, nikita will chmod after\ndownload." }, "negotiate": { @@ -75,11 +75,11 @@ "description": "Write to file instead of stdout; mapped to the curl `output` argument." }, "timeout": { - "$ref": "module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/timeout", + "$ref": "module://@nikitajs/network/tcp/wait#/definitions/config/properties/timeout", "description": "Maximum time in millisecond for the HTTP request. Prevent the\nrequest from hanging." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid", + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid", "description": "User name or id who owns the target file; only apply if `target` is\nprovided." }, "url": { diff --git a/packages/network/lib/http/wait/index.js b/packages/network/lib/http/wait/index.js index 507e65d5c..43e3d3e2b 100644 --- a/packages/network/lib/http/wait/index.js +++ b/packages/network/lib/http/wait/index.js @@ -1,6 +1,6 @@ // Dependencies -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import utils from "@nikitajs/network/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Errors const errors = { @@ -10,7 +10,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { const start = Date.now(); config.status_code = config.status_code.map(function (item) { diff --git a/packages/network/lib/http/wait/schema.json b/packages/network/lib/http/wait/schema.json index 4768ca703..52f91373d 100644 --- a/packages/network/lib/http/wait/schema.json +++ b/packages/network/lib/http/wait/schema.json @@ -1,11 +1,11 @@ { "config": { "type": "object", - "$ref": "module://@nikitajs/network/lib/http#/definitions/config", + "$ref": "module://@nikitajs/network/http#/definitions/config", "properties": { "interval": { "default": 2000, - "$ref": "module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/interval" + "$ref": "module://@nikitajs/network/tcp/wait#/definitions/config/properties/interval" }, "status_code": { "type": "array", @@ -27,7 +27,7 @@ "description": "Accepted status codes. Accepted values are strings and regular\nexpressions. String patterns are defined using the `x` character.\nFor example the value `5xx` accept all HTTP status code from the 5\nclass." 
}, "timeout": { - "$ref": "module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/timeout", + "$ref": "module://@nikitajs/network/tcp/wait#/definitions/config/properties/timeout", "description": "Maximum time in millisecond to wait until this action is considered\nto have failed. When defined, the timeout is applied set to http\nrequest to avoid request hanging." } } diff --git a/packages/network/lib/register.js b/packages/network/lib/register.js index ff5bec75f..1eb298a1d 100644 --- a/packages/network/lib/register.js +++ b/packages/network/lib/register.js @@ -1,26 +1,19 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; // Action registration -module.exports = { +const actions ={ network: { http: { - '': '@nikitajs/network/lib/http', - 'wait': '@nikitajs/network/lib/http/wait' + '': '@nikitajs/network/http', + 'wait': '@nikitajs/network/http/wait' }, tcp: { - 'assert': '@nikitajs/network/lib/tcp/assert', - 'wait': '@nikitajs/network/lib/tcp/wait' + 'assert': '@nikitajs/network/tcp/assert', + 'wait': '@nikitajs/network/tcp/wait' } } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/network/lib/tcp/assert/index.js b/packages/network/lib/tcp/assert/index.js index 413262fd2..e606bc87b 100644 --- a/packages/network/lib/tcp/assert/index.js +++ b/packages/network/lib/tcp/assert/index.js @@ -1,83 +1,21 @@ - // Dependencies -const definitions = require('./schema.json'); - -// on_action = function({config}) { -// var extract_servers, i, len, ref, srv, srvs; -// if (config.server) { -// if (Array.isArray(config.server)) { -// config.server = utils.array.flatten(config.server); -// } else { -// config.server = [config.server]; -// } -// } -// extract_servers = function(config) { -// var host, i, j, len, len1, port, ref, ref1, servers; -// if (typeof config === 'string') { -// [host, port] = config.split(':'); -// config = { -// host: host, -// port: port -// }; -// } -// if (!config.host || !config.port) { -// return []; -// } -// if (config.host) { -// if (!Array.isArray(config.host)) { -// config.host = [config.host]; -// } -// } -// if (config.port) { -// if (!Array.isArray(config.port)) { -// config.port = [config.port]; -// } -// } -// servers = []; -// ref = config.host || []; -// for (i = 0, len = ref.length; i < len; i++) { -// host = ref[i]; -// ref1 = config.port || []; -// for (j = 0, len1 = ref1.length; j < len1; j++) { -// port = ref1[j]; -// servers.push({ -// host: host, -// port: port -// }); -// } -// } -// return servers; -// }; -// srvs = extract_servers(config); -// if (config.server) { -// ref = config.server; -// for (i = 0, len = ref.length; i < len; i++) { -// srv = ref[i]; -// srvs.push(...extract_servers(srv)); -// } -// } -// config.server = srvs; -// return config.server = utils.array.flatten(config.server); -// }; +import definitions from "./schema.json" assert { type: "json" }; +import wait from "@nikitajs/network/tcp/wait"; // Action -module.exports = { - handler: async function({config}) { - var err, error, i, len, ref, server; - error = null; - ref = config.server; - for (i = 0, len = ref.length; i < len; i++) { - server = ref[i]; +export default { + handler: async function ({ config }) { + let error = null; + for (const server of config.server) { try { await this.execute({ - command: `bash -c 'echo > 
/dev/tcp/${server.host}/${server.port}'` + command: `bash -c 'echo > /dev/tcp/${server.host}/${server.port}'`, }); if (config.not === true) { error = `Address listening: \"${server.host}:${server.port}\"`; break; } - } catch (error1) { - err = error1; + } catch (err) { if (config.not !== true) { error = `Address not listening: \"${server.host}:${server.port}\"`; break; @@ -90,10 +28,10 @@ module.exports = { return true; }, hooks: { - on_action: require('../wait').hooks.on_action + on_action: wait.hooks.on_action, }, metadata: { shy: true, - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/network/lib/tcp/assert/schema.json b/packages/network/lib/tcp/assert/schema.json index eb94c0d15..1ae90f1c5 100644 --- a/packages/network/lib/tcp/assert/schema.json +++ b/packages/network/lib/tcp/assert/schema.json @@ -3,18 +3,18 @@ "type": "object", "properties": { "host": { - "$ref": "module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/host", + "$ref": "module://@nikitajs/network/tcp/wait#/definitions/config/properties/host", "description": "Host of the targeted server, could be a FQDN, a hostname or an IP." }, "port": { - "$ref": "module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/port", + "$ref": "module://@nikitajs/network/tcp/wait#/definitions/config/properties/port", "description": "Port of the targeted server." }, "server": { - "$ref": "module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/server" + "$ref": "module://@nikitajs/network/tcp/wait#/definitions/config/properties/server" }, "not": { - "$ref": "module://@nikitajs/core/lib/actions/assert#/definitions/config/properties/not" + "$ref": "module://@nikitajs/core/actions/assert#/definitions/config/properties/not" } } } diff --git a/packages/network/lib/tcp/wait/index.js b/packages/network/lib/tcp/wait/index.js index 9efc7cf0f..700a12774 100644 --- a/packages/network/lib/tcp/wait/index.js +++ b/packages/network/lib/tcp/wait/index.js @@ -1,7 +1,7 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Errors const errors = { @@ -11,7 +11,7 @@ const errors = { }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { if (!config.server?.length) { log({ diff --git a/packages/network/lib/utils/curl.js b/packages/network/lib/utils/curl.js index 4478a6f02..0f2d12f30 100644 --- a/packages/network/lib/utils/curl.js +++ b/packages/network/lib/utils/curl.js @@ -1,168 +1,172 @@ -module.exports = { - // See `man 3 libcurl-errors` - error: function(exit_code) { - switch (exit_code) { - case 1: - return 'CURLE_UNSUPPORTED_PROTOCOL'; - case 2: - return 'CURLE_FAILED_INIT'; - case 3: - return 'CURLE_URL_MALFORMAT'; - case 4: - return 'CURLE_NOT_BUILT_IN'; - case 5: - return 'CURLE_COULDNT_RESOLVE_PROXY'; - case 6: - return 'CURLE_COULDNT_RESOLVE_HOST'; - case 7: - return 'CURLE_COULDNT_CONNECT'; - case 8: - return 'CURLE_FTP_WEIRD_SERVER_REPLY'; - case 9: - return 'CURLE_REMOTE_ACCESS_DENIED'; - case 10: - return 'CURLE_FTP_ACCEPT_FAILED'; - case 11: - return 'CURLE_FTP_WEIRD_PASS_REPLY'; - case 12: - return 'CURLE_FTP_ACCEPT_TIMEOUT'; - case 13: - return 'CURLE_FTP_WEIRD_PASV_REPLY'; - case 14: - return 'CURLE_FTP_WEIRD_227_FORMAT'; - case 15: - return 'CURLE_FTP_CANT_GET_HOST'; - case 
17: - return 'CURLE_FTP_COULDNT_SET_TYPE'; - case 18: - return 'CURLE_PARTIAL_FILE'; - case 19: - return 'CURLE_FTP_COULDNT_RETR_FILE'; - case 21: - return 'CURLE_QUOTE_ERROR'; - case 22: - return 'CURLE_HTTP_RETURNED_ERROR'; - case 23: - return 'CURLE_WRITE_ERROR'; - case 25: - return 'CURLE_UPLOAD_FAILED'; - case 26: - return 'CURLE_READ_ERROR'; - case 27: - return 'CURLE_OUT_OF_MEMORY'; - case 28: - return 'CURLE_OPERATION_TIMEDOUT'; - case 30: - return 'CURLE_FTP_PORT_FAILED'; - case 31: - return 'CURLE_FTP_COULDNT_USE_REST'; - case 33: - return 'CURLE_RANGE_ERROR'; - case 34: - return 'CURLE_HTTP_POST_ERROR'; - case 35: - return 'CURLE_SSL_CONNECT_ERROR'; - case 36: - return 'CURLE_BAD_DOWNLOAD_RESUME'; - case 37: - return 'CURLE_FILE_COULDNT_READ_FILE'; - case 38: - return 'CURLE_LDAP_CANNOT_BIND'; - case 39: - return 'CURLE_LDAP_SEARCH_FAILED'; - case 41: - return 'CURLE_FUNCTION_NOT_FOUND'; - case 42: - return 'CURLE_ABORTED_BY_CALLBACK'; - case 43: - return 'CURLE_BAD_FUNCTION_ARGUMENT'; - case 45: - return 'CURLE_INTERFACE_FAILED'; - case 47: - return 'CURLE_TOO_MANY_REDIRECTS'; - case 48: - return 'CURLE_UNKNOWN_OPTION'; - case 49: - return 'CURLE_TELNET_OPTION_SYNTAX'; - case 51: - return 'CURLE_PEER_FAILED_VERIFICATION'; - case 52: - return 'CURLE_GOT_NOTHING'; - case 53: - return 'CURLE_SSL_ENGINE_NOTFOUND'; - case 54: - return 'CURLE_SSL_ENGINE_SETFAILED'; - case 55: - return 'CURLE_SEND_ERROR'; - case 56: - return 'CURLE_RECV_ERROR'; - case 58: - return 'CURLE_SSL_CERTPROBLEM'; - case 59: - return 'CURLE_SSL_CIPHER'; - case 60: - return 'CURLE_SSL_CACERT'; - case 61: - return 'CURLE_BAD_CONTENT_ENCODING'; - case 62: - return 'CURLE_LDAP_INVALID_URL'; - case 63: - return 'CURLE_FILESIZE_EXCEEDED'; - case 64: - return 'CURLE_USE_SSL_FAILED'; - case 65: - return 'CURLE_SEND_FAIL_REWIND'; - case 66: - return 'CURLE_SSL_ENGINE_INITFAILED'; - case 67: - return 'CURLE_LOGIN_DENIED'; - case 68: - return 'CURLE_TFTP_NOTFOUND'; - case 69: - return 'CURLE_TFTP_PERM'; - case 70: - return 'CURLE_REMOTE_DISK_FULL'; - case 71: - return 'CURLE_TFTP_ILLEGAL'; - case 72: - return 'CURLE_TFTP_UNKNOWNID'; - case 73: - return 'CURLE_REMOTE_FILE_EXISTS'; - case 74: - return 'CURLE_TFTP_NOSUCHUSER'; - case 75: - return 'CURLE_CONV_FAILED'; - case 76: - return 'CURLE_CONV_REQD'; - case 77: - return 'CURLE_SSL_CACERT_BADFILE'; - case 78: - return 'CURLE_REMOTE_FILE_NOT_FOUND'; - case 79: - return 'CURLE_SSH'; - case 80: - return 'CURLE_SSL_SHUTDOWN_FAILED'; - case 81: - return 'CURLE_AGAIN'; - case 82: - return 'CURLE_SSL_CRL_BADFILE'; - case 83: - return 'CURLE_SSL_ISSUER_ERROR'; - case 84: - return 'CURLE_FTP_PRET_FAILED'; - case 85: - return 'CURLE_RTSP_CSEQ_ERROR'; - case 86: - return 'CURLE_RTSP_SESSION_ERROR'; - case 87: - return 'CURLE_FTP_BAD_FILE_LIST'; - case 88: - return 'CURLE_CHUNK_FAILED'; - case 89: - return 'CURLE_NO_CONNECTION_AVAILABLE'; - default: - return void 0; - } +// See `man 3 libcurl-errors` +const error = function(exit_code) { + switch (exit_code) { + case 1: + return 'CURLE_UNSUPPORTED_PROTOCOL'; + case 2: + return 'CURLE_FAILED_INIT'; + case 3: + return 'CURLE_URL_MALFORMAT'; + case 4: + return 'CURLE_NOT_BUILT_IN'; + case 5: + return 'CURLE_COULDNT_RESOLVE_PROXY'; + case 6: + return 'CURLE_COULDNT_RESOLVE_HOST'; + case 7: + return 'CURLE_COULDNT_CONNECT'; + case 8: + return 'CURLE_FTP_WEIRD_SERVER_REPLY'; + case 9: + return 'CURLE_REMOTE_ACCESS_DENIED'; + case 10: + return 'CURLE_FTP_ACCEPT_FAILED'; + case 11: + return 'CURLE_FTP_WEIRD_PASS_REPLY'; + case 12: + return 
'CURLE_FTP_ACCEPT_TIMEOUT'; + case 13: + return 'CURLE_FTP_WEIRD_PASV_REPLY'; + case 14: + return 'CURLE_FTP_WEIRD_227_FORMAT'; + case 15: + return 'CURLE_FTP_CANT_GET_HOST'; + case 17: + return 'CURLE_FTP_COULDNT_SET_TYPE'; + case 18: + return 'CURLE_PARTIAL_FILE'; + case 19: + return 'CURLE_FTP_COULDNT_RETR_FILE'; + case 21: + return 'CURLE_QUOTE_ERROR'; + case 22: + return 'CURLE_HTTP_RETURNED_ERROR'; + case 23: + return 'CURLE_WRITE_ERROR'; + case 25: + return 'CURLE_UPLOAD_FAILED'; + case 26: + return 'CURLE_READ_ERROR'; + case 27: + return 'CURLE_OUT_OF_MEMORY'; + case 28: + return 'CURLE_OPERATION_TIMEDOUT'; + case 30: + return 'CURLE_FTP_PORT_FAILED'; + case 31: + return 'CURLE_FTP_COULDNT_USE_REST'; + case 33: + return 'CURLE_RANGE_ERROR'; + case 34: + return 'CURLE_HTTP_POST_ERROR'; + case 35: + return 'CURLE_SSL_CONNECT_ERROR'; + case 36: + return 'CURLE_BAD_DOWNLOAD_RESUME'; + case 37: + return 'CURLE_FILE_COULDNT_READ_FILE'; + case 38: + return 'CURLE_LDAP_CANNOT_BIND'; + case 39: + return 'CURLE_LDAP_SEARCH_FAILED'; + case 41: + return 'CURLE_FUNCTION_NOT_FOUND'; + case 42: + return 'CURLE_ABORTED_BY_CALLBACK'; + case 43: + return 'CURLE_BAD_FUNCTION_ARGUMENT'; + case 45: + return 'CURLE_INTERFACE_FAILED'; + case 47: + return 'CURLE_TOO_MANY_REDIRECTS'; + case 48: + return 'CURLE_UNKNOWN_OPTION'; + case 49: + return 'CURLE_TELNET_OPTION_SYNTAX'; + case 51: + return 'CURLE_PEER_FAILED_VERIFICATION'; + case 52: + return 'CURLE_GOT_NOTHING'; + case 53: + return 'CURLE_SSL_ENGINE_NOTFOUND'; + case 54: + return 'CURLE_SSL_ENGINE_SETFAILED'; + case 55: + return 'CURLE_SEND_ERROR'; + case 56: + return 'CURLE_RECV_ERROR'; + case 58: + return 'CURLE_SSL_CERTPROBLEM'; + case 59: + return 'CURLE_SSL_CIPHER'; + case 60: + return 'CURLE_SSL_CACERT'; + case 61: + return 'CURLE_BAD_CONTENT_ENCODING'; + case 62: + return 'CURLE_LDAP_INVALID_URL'; + case 63: + return 'CURLE_FILESIZE_EXCEEDED'; + case 64: + return 'CURLE_USE_SSL_FAILED'; + case 65: + return 'CURLE_SEND_FAIL_REWIND'; + case 66: + return 'CURLE_SSL_ENGINE_INITFAILED'; + case 67: + return 'CURLE_LOGIN_DENIED'; + case 68: + return 'CURLE_TFTP_NOTFOUND'; + case 69: + return 'CURLE_TFTP_PERM'; + case 70: + return 'CURLE_REMOTE_DISK_FULL'; + case 71: + return 'CURLE_TFTP_ILLEGAL'; + case 72: + return 'CURLE_TFTP_UNKNOWNID'; + case 73: + return 'CURLE_REMOTE_FILE_EXISTS'; + case 74: + return 'CURLE_TFTP_NOSUCHUSER'; + case 75: + return 'CURLE_CONV_FAILED'; + case 76: + return 'CURLE_CONV_REQD'; + case 77: + return 'CURLE_SSL_CACERT_BADFILE'; + case 78: + return 'CURLE_REMOTE_FILE_NOT_FOUND'; + case 79: + return 'CURLE_SSH'; + case 80: + return 'CURLE_SSL_SHUTDOWN_FAILED'; + case 81: + return 'CURLE_AGAIN'; + case 82: + return 'CURLE_SSL_CRL_BADFILE'; + case 83: + return 'CURLE_SSL_ISSUER_ERROR'; + case 84: + return 'CURLE_FTP_PRET_FAILED'; + case 85: + return 'CURLE_RTSP_CSEQ_ERROR'; + case 86: + return 'CURLE_RTSP_SESSION_ERROR'; + case 87: + return 'CURLE_FTP_BAD_FILE_LIST'; + case 88: + return 'CURLE_CHUNK_FAILED'; + case 89: + return 'CURLE_NO_CONNECTION_AVAILABLE'; + default: + return void 0; } }; + +export { error }; + +export default { + error: error +}; diff --git a/packages/network/lib/utils/index.js b/packages/network/lib/utils/index.js index 12e892b58..08b1835cf 100644 --- a/packages/network/lib/utils/index.js +++ b/packages/network/lib/utils/index.js @@ -1,7 +1,8 @@ // Dependencies -const utils = require('@nikitajs/core/lib/utils'); +import utils from "@nikitajs/core/utils"; +import curl from 
"@nikitajs/network/utils/curl"; -module.exports = { +export default { ...utils, - curl: require('./curl') + curl: curl, }; diff --git a/packages/network/package.json b/packages/network/package.json index f164eef1d..a52f52136 100644 --- a/packages/network/package.json +++ b/packages/network/package.json @@ -51,20 +51,26 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/network/lib/register" - ], "inline-diffs": true, - "timeout": 10000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/network/register", + "should" + ], + "throw-deprecation": true, + "timeout": 10000 }, "publishConfig": { "access": "public" @@ -82,5 +88,6 @@ }, "dependencies": { "dedent": "^1.2.0" - } + }, + "type": "module" } diff --git a/packages/network/test.sample.coffee b/packages/network/test.sample.coffee index 8efed6669..c43cc3459 100644 --- a/packages/network/test.sample.coffee +++ b/packages/network/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: posix: true config: [ @@ -12,5 +12,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/network/test/http/index.coffee b/packages/network/test/http/index.coffee index 37f5e67a8..c0c966f59 100644 --- a/packages/network/test/http/index.coffee +++ b/packages/network/test/http/index.coffee @@ -1,72 +1,72 @@ -http = require 'http' -{merge} = require 'mixme' -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import http from 'node:http' +import {merge} from 'mixme' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.posix +describe 'network.http', -> + return unless test.tags.posix -portincr = 22345 -server = -> - _ = null - port = portincr++ - srv = - port: port - listen: -> - new Promise (resolve, reject) -> - _ = http.createServer (req, res) -> - switch req.url - when '/' - res.writeHead 200, 'OK', {'Content-Type': 'application/json'} - res.end '{"key": "value"}' - when '/ping' - body = '' - req.on 'data', (chunk) -> - body += chunk.toString() - req.on 'end', () -> + portincr = 22345 + server = -> + _ = null + port = portincr++ + srv = + port: port + listen: -> + new Promise (resolve, reject) -> + _ = http.createServer (req, res) -> + switch req.url + when '/' res.writeHead 200, 'OK', {'Content-Type': 'application/json'} - res.end body - when '/request_404' - res.writeHead 404, 'Not found' - res.end() - when '/request_301' - res.writeHead 301, 'Moved Permanently', - 'Server': 'Apache/2.4.6 (CentOS) mod_auth_gssapi/1.5.1 mod_nss/1.0.14 NSS/3.28.4 mod_wsgi/3.4 Python/2.7.5' - 'X-Frame-Options': 'DENY' - 'Content-Security-Policy': 'frame-ancestors \'none\'' - 'Location': 'http://ipa.nikita/ipa/session/json' - 'Cache-Control': 'no-cache' - 'Set-Cookie': 'ipa_session=;Max-Age=0;path=/ipa;httponly;secure;' - 'Content-Length': 241 - 'Content-Type': 'text/html; charset=iso-8859-1' - res.end """ - - - 301 Moved 
Permanently</title>
- </head><body>
- <h1>Moved Permanently</h1>
- <p>The document has moved <a href="http://ipa.nikita/ipa/session/json">here</a>.</p>
- - """ - when '/follow_redirect_1' - res.writeHead 301, 'Moved Permanently', - 'Location': "http://localhost:#{port}/follow_redirect_2" - res.end() - when '/follow_redirect_2' - res.writeHead 200, 'OK', {'Content-Type': 'application/json'} - res.end '{"key": "value"}' - when '/content_type_with_charset' - res.writeHead 200, 'OK', {'Content-Type': 'application/json; charset=utf-8'} - res.end '{"key": "value"}' - _.listen port - .on 'listening', -> resolve srv - .on 'error', (err) -> reject err - close: -> - new Promise (resolve) -> - _.close resolve - -describe 'network.http', -> + res.end '{"key": "value"}' + when '/ping' + body = '' + req.on 'data', (chunk) -> + body += chunk.toString() + req.on 'end', () -> + res.writeHead 200, 'OK', {'Content-Type': 'application/json'} + res.end body + when '/request_404' + res.writeHead 404, 'Not found' + res.end() + when '/request_301' + res.writeHead 301, 'Moved Permanently', + 'Server': 'Apache/2.4.6 (CentOS) mod_auth_gssapi/1.5.1 mod_nss/1.0.14 NSS/3.28.4 mod_wsgi/3.4 Python/2.7.5' + 'X-Frame-Options': 'DENY' + 'Content-Security-Policy': 'frame-ancestors \'none\'' + 'Location': 'http://ipa.nikita/ipa/session/json' + 'Cache-Control': 'no-cache' + 'Set-Cookie': 'ipa_session=;Max-Age=0;path=/ipa;httponly;secure;' + 'Content-Length': 241 + 'Content-Type': 'text/html; charset=iso-8859-1' + res.end """ + + + 301 Moved Permanently + +

+ <h1>Moved Permanently</h1>
+ <p>The document has moved <a href="http://ipa.nikita/ipa/session/json">here</a>.</p>
+ + """ + when '/follow_redirect_1' + res.writeHead 301, 'Moved Permanently', + 'Location': "http://localhost:#{port}/follow_redirect_2" + res.end() + when '/follow_redirect_2' + res.writeHead 200, 'OK', {'Content-Type': 'application/json'} + res.end '{"key": "value"}' + when '/content_type_with_charset' + res.writeHead 200, 'OK', {'Content-Type': 'application/json; charset=utf-8'} + res.end '{"key": "value"}' + _.listen port + .on 'listening', -> resolve srv + .on 'error', (err) -> reject err + close: -> + new Promise (resolve) -> + _.close resolve describe 'schema', -> @@ -78,7 +78,7 @@ describe 'network.http', -> await nikita.network.http({url: '#', timeout: 'invalid'}, (->)) .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' - message: 'NIKITA_SCHEMA_VALIDATION_CONFIG: one error was found in the configuration of action `network.http`: module://@nikitajs/network/lib/tcp/wait#/definitions/config/properties/timeout/type config/timeout must be integer, type is "integer".' + message: 'NIKITA_SCHEMA_VALIDATION_CONFIG: one error was found in the configuration of action `network.http`: module://@nikitajs/network/tcp/wait#/definitions/config/properties/timeout/type config/timeout must be integer, type is "integer".' it 'casting', () -> nikita.network.http url: '#', timeout: '1', ({config}) -> diff --git a/packages/network/test/http/wait.coffee b/packages/network/test/http/wait.coffee index f495a828b..34a3e38e2 100644 --- a/packages/network/test/http/wait.coffee +++ b/packages/network/test/http/wait.coffee @@ -1,43 +1,43 @@ -http = require 'http' -url = require 'url' -querystring = require 'querystring' -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import http from 'node:http' +import url from 'node:url' +import querystring from 'node:querystring' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.posix +describe 'network.http.wait', -> + return unless test.tags.posix -portincr = 22345 -hangTimeout = null -server = -> - _ = null - port = portincr++ - srv = - port: port - listen: -> - countInvalidStatus = 500 - new Promise (resolve, reject) -> - _ = http.createServer (req, res) -> - switch req.url - when '/hang' - hangTimeout = setTimeout((->), 100000) - when '/200' - res.writeHead 200, 'OK', {'Content-Type': 'application/json'} - res.end '{"key": "value"}' - when '/200/invalid/status' - res.writeHead countInvalidStatus, 'OK', {'Content-Type': 'application/json'} - res.end '{"key": "value"}' - countInvalidStatus = countInvalidStatus - 100 - _.listen port - .on 'listening', -> resolve srv - .on 'error', (err) -> reject err - close: -> - new Promise (resolve) -> - clearTimeout hangTimeout if hangTimeout - _.close resolve - -describe 'run', -> + portincr = 22345 + hangTimeout = null + server = -> + _ = null + port = portincr++ + srv = + port: port + listen: -> + countInvalidStatus = 500 + new Promise (resolve, reject) -> + _ = http.createServer (req, res) -> + switch req.url + when '/hang' + hangTimeout = setTimeout((->), 100000) + when '/200' + res.writeHead 200, 'OK', {'Content-Type': 'application/json'} + res.end '{"key": "value"}' + when '/200/invalid/status' + res.writeHead countInvalidStatus, 'OK', {'Content-Type': 'application/json'} + res.end '{"key": "value"}' + countInvalidStatus = countInvalidStatus - 100 + _.listen port + .on 'listening', -> resolve srv + .on 'error', (err) -> reject err + close: -> + 
new Promise (resolve) -> + clearTimeout hangTimeout if hangTimeout + _.close resolve they 'code 200 with server started', ({ssh}) -> srv = server() @@ -56,7 +56,7 @@ describe 'run', -> $ssh: ssh , ({tools: {events}}) -> events.on 'text', ({attempt, module}) -> - return unless module is '@nikitajs/network/src/http/wait' + return unless module is '@nikitajs/network/http/wait' srv.listen() if attempt is 0 {$status} = await @network.http.wait url: "http://localhost:#{srv.port}/200" diff --git a/packages/network/test/tcp/assert.coffee b/packages/network/test/tcp/assert.coffee index da0754b93..10e7b7aed 100644 --- a/packages/network/test/tcp/assert.coffee +++ b/packages/network/test/tcp/assert.coffee @@ -1,25 +1,25 @@ -http = require 'http' -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix - -portincr = 22445 -server = -> - new Promise (resolve) -> - srv = http.createServer (req, res) -> - res.writeHead 200, {'Content-Type': 'text/plain'} - res.end 'okay' - srv.port = portincr++ - srv.close = ( (fn) -> -> - new Promise (resolve) -> - fn.call srv, resolve - )(srv.close) - srv.listen srv.port, -> resolve srv +import http from 'node:http' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'network.tcp.assert', -> + return unless test.tags.posix + + portincr = 22445 + server = -> + new Promise (resolve) -> + srv = http.createServer (req, res) -> + res.writeHead 200, {'Content-Type': 'text/plain'} + res.end 'okay' + srv.port = portincr++ + srv.close = ( (fn) -> -> + new Promise (resolve) -> + fn.call srv, resolve + )(srv.close) + srv.listen srv.port, -> resolve srv they 'port and host', ({ssh}) -> try diff --git a/packages/network/test/tcp/wait.coffee b/packages/network/test/tcp/wait.coffee index cdd3409d3..01b3c7fa8 100644 --- a/packages/network/test/tcp/wait.coffee +++ b/packages/network/test/tcp/wait.coffee @@ -1,12 +1,12 @@ -http = require 'http' -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.posix +import http from 'node:http' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'network.tcp.wait', -> + return unless test.tags.posix portincr = 22545 server = -> diff --git a/packages/network/test/test.coffee b/packages/network/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/network/test/test.coffee +++ b/packages/network/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration 
-config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/nikita/README.md b/packages/nikita/README.md index cc32439f4..03b62d9eb 100644 --- a/packages/nikita/README.md +++ b/packages/nikita/README.md @@ -2,3 +2,48 @@ # Nikita Automation and deployment solution of applications and infrastructures. + +## Example + +```js +import nikita from "nikita"; + +await nikita.log + .cli() + .file.download({ + $header: "Download", + source: "http://download.redis.io/redis-stable.tar.gz", + target: "redis-stable.tar.gz", + cache: true, + cache_dir: "/tmp/.cache", + }) + .execute({ + $header: "Compile", + $unless_exists: "redis-stable/src/redis-server", + command: ` + tar xzf ./redis-stable.tar.gz + cd redis-stable + make + `, + }) + .file.properties({ + $header: "Configure", + target: "conf/redis.conf", + separator: " ", + content: { + bind: "127.0.0.1", + "protected-mode": "yes", + port: 6379, + }, + }) + .execute({ + $debug: true, + $header: "Start", + // bash: true, + code: [0, 42], + command: ` + ./redis-stable/src/redis-cli ping && exit 42 + nohup ./redis-stable/src/redis-server conf/redis.conf & + `, + }); +``` diff --git a/packages/nikita/lib/index.js b/packages/nikita/lib/index.js index 3ab3a4c6b..a0cdd5f86 100644 --- a/packages/nikita/lib/index.js +++ b/packages/nikita/lib/index.js @@ -1,17 +1,18 @@ - // Register actions from Nikita packages -require('@nikitajs/db/lib/register') -require('@nikitajs/docker/lib/register') -require('@nikitajs/file/lib/register') -require('@nikitajs/ipa/lib/register') -require('@nikitajs/java/lib/register') -require('@nikitajs/krb5/lib/register') -require('@nikitajs/ldap/lib/register') -require('@nikitajs/log/lib/register') -require('@nikitajs/lxd/lib/register') -require('@nikitajs/network/lib/register') -require('@nikitajs/service/lib/register') -require('@nikitajs/system/lib/register') -require('@nikitajs/tools/lib/register') +import "@nikitajs/db/register"; +import "@nikitajs/docker/register"; +import "@nikitajs/file/register"; +import "@nikitajs/ipa/register"; +import "@nikitajs/java/register"; +import "@nikitajs/krb5/register"; +import "@nikitajs/ldap/register"; +import "@nikitajs/log/register"; +import "@nikitajs/lxd/register"; +import "@nikitajs/network/register"; +import "@nikitajs/service/register"; +import "@nikitajs/system/register"; +import "@nikitajs/tools/register"; // Expose the Nikita core engine -module.exports = require('@nikitajs/core') +import nikita from "@nikitajs/core"; + +export default nikita; diff --git a/packages/nikita/package.json b/packages/nikita/package.json index 4a6a42a83..fd321b77c 100644 --- a/packages/nikita/package.json +++ b/packages/nikita/package.json @@ -1,5 +1,6 @@ { "name": "nikita", + "version": "1.0.0-alpha.3", "description": "Automation and deployment solution of applications and infrastructures.", "keywords": [ "nikita", @@ -11,7 +12,6 @@ "system", "task" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "email": "open@adaltas.com", @@ -58,6 +58,9 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + ".": "./lib/index.js" + }, "files": [ "/lib" ], @@ -72,5 +75,6 @@ "scripts": { "test": "npm run test:local", "test:local": "mocha 'test/**/*.js'" - } + }, + "type": "module" } diff --git a/packages/nikita/test/index.js b/packages/nikita/test/index.js index cfe29a4b2..f9b495a53 100644 --- 
a/packages/nikita/test/index.js +++ b/packages/nikita/test/index.js @@ -1,12 +1,11 @@ +import assert from "node:assert"; +import nikita from "nikita"; -const assert = require('assert') -const nikita = require('..') - -describe('core', () => { - it('load nikita', async () => { - const {stdout} = await nikita.execute({ - command: 'hostname' +describe("core", () => { + it("load nikita", async () => { + const { stdout } = await nikita.execute({ + command: "hostname", }); - assert(typeof stdout === 'string') - }) -}) + assert(typeof stdout === "string"); + }); +}); diff --git a/packages/service/README.md b/packages/service/README.md index 2786300a4..fabb7ef42 100644 --- a/packages/service/README.md +++ b/packages/service/README.md @@ -2,3 +2,18 @@ # Nikita "service" package The "service" package provides Nikita actions for various service management operations. + +## Usage + +```js +import "@nikitajs/service/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.service({ + name: "nginx-light", + srv_name: "nginx", + chk_name: "nginx", + state: "started", +}); +console.info("Network available on first connection attempt:", !$status); +``` diff --git a/packages/service/env/archlinux/Dockerfile b/packages/service/env/archlinux/Dockerfile index d327c72c0..7e8dffbfe 100644 --- a/packages/service/env/archlinux/Dockerfile +++ b/packages/service/env/archlinux/Dockerfile @@ -1,30 +1,30 @@ FROM archlinux:latest -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ - pacman --noconfirm -Syu \ - && pacman --noconfirm -S procps grep which sed zip git + pacman --noconfirm -Syu && \ + pacman --noconfirm -S procps grep which sed zip git # Install Node.js RUN pacman --noconfirm -S nodejs npm # Install SSH and sudo -RUN pacman --noconfirm -S openssh sudo \ - && /usr/bin/ssh-keygen -A +RUN pacman --noconfirm -S openssh sudo && \ + /usr/bin/ssh-keygen -A ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/service # Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d /home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/service/env/archlinux/docker-compose.yml b/packages/service/env/archlinux/docker-compose.yml index 30764abb2..f6b1f975a 100644 --- a/packages/service/env/archlinux/docker-compose.yml +++ b/packages/service/env/archlinux/docker-compose.yml @@ -3,10 +3,10 @@ version: '3' services: nodejs: build: . 
- image: nikita_service_archlinux container_name: nikita_service_archlinux - platform: linux/amd64 # Required on Apple M1 - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/service/env/archlinux/test.coffee + image: nikita_service_archlinux + volumes: + - ../../../../:/nikita + platform: linux/amd64 diff --git a/packages/service/env/archlinux/test.coffee b/packages/service/env/archlinux/test.coffee index 4e0bd293d..8ff9cb25f 100644 --- a/packages/service/env/archlinux/test.coffee +++ b/packages/service/env/archlinux/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: service_install: true service_install_arch: true diff --git a/packages/service/env/centos7/Dockerfile b/packages/service/env/centos7/Dockerfile index 8c90900a7..569874692 100644 --- a/packages/service/env/centos7/Dockerfile +++ b/packages/service/env/centos7/Dockerfile @@ -1,14 +1,14 @@ FROM centos:7.9.2009 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ - # Install Node dependencies - yum install -y git make && \ - # Install Java, OpenSSL, GIT and compression dependencies - yum install -y java openssl git zip unzip bzip2 && \ - # Install SSH and sudo - yum install -y openssh-server openssh-clients sudo && \ - ssh-keygen -A + # Install Node dependencies + yum install -y git make && \ + # Install Java, OpenSSL, GIT and compression dependencies + yum install -y java openssl git zip unzip bzip2 && \ + # Install SSH and sudo + yum install -y openssh-server openssh-clients sudo && \ + ssh-keygen -A RUN yum clean all @@ -18,20 +18,18 @@ WORKDIR /nikita/packages/service # Sudo User RUN useradd nikita -d /home/nikita && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js # Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 +RUN curl -L https://git.io/n-install | bash -s -- -y 16.19 ENV PATH /home/nikita/n/bin:$PATH -RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/service/env/centos7/test.coffee b/packages/service/env/centos7/test.coffee index 16290c766..00ecb1123 100644 --- a/packages/service/env/centos7/test.coffee +++ b/packages/service/env/centos7/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: service_install: true service_startup: true diff --git a/packages/service/env/run.sh b/packages/service/env/run.sh index 5ddfa0151..a0a212a59 100755 --- a/packages/service/env/run.sh +++ b/packages/service/env/run.sh @@ -10,5 +10,6 @@ cd `pwd`/`dirname ${BASH_SOURCE}` # "install # specific # add pacman options" ./archlinux/run.sh ./centos7/run.sh -npx coffee ./systemctl/index.coffee run -./ubuntu/run.sh +node ./systemctl/index.js run +./ubuntu-1404/run.sh +./ubuntu-1404-outdated/run.sh diff --git a/packages/service/env/systemctl/index.coffee b/packages/service/env/systemctl/index.coffee deleted file mode 100644 index 3a7d364a5..000000000 --- a/packages/service/env/systemctl/index.coffee +++ /dev/null @@ -1,47 +0,0 @@ - -path = require 'path' -runner = 
require '@nikitajs/lxd-runner' - -runner - cwd: '/nikita/packages/service' - container: 'nikita-service-systemctl' - logdir: path.resolve __dirname, './logs' - cluster: - containers: - 'nikita-service-systemctl': - image: 'images:centos/7' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/service/env/systemctl/test.coffee' - 'raw.idmap': if process.env['NIKITA_LXD_IN_VAGRANT'] - then 'both 1000 0' - else "both #{process.getuid()} 0" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Node.js' - container: config.container - command: ''' - if command -v node ; then exit 42; fi - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 16 - ''' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'SSH keys' - container: config.container - command: """ - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! -f /root/.ssh/id_rsa ]; then - ssh-keygen -t rsa -f /root/.ssh/id_rsa -N '' - cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys - fi - """ - trap: true -.catch (err) -> - console.error err diff --git a/packages/service/env/systemctl/index.js b/packages/service/env/systemctl/index.js new file mode 100644 index 000000000..d2d61e0cf --- /dev/null +++ b/packages/service/env/systemctl/index.js @@ -0,0 +1,61 @@ +import path from "node:path"; +import dedent from "dedent"; +import runner from "@nikitajs/lxd-runner"; +const dirname = new URL(".", import.meta.url).pathname; + +await runner({ + cwd: "/nikita/packages/service", + container: "nikita-service-systemctl", + logdir: path.resolve(dirname, "./logs"), + cluster: { + containers: { + "nikita-service-systemctl": { + image: "images:rockylinux/9", + properties: { + "environment.NIKITA_TEST_MODULE": + "/nikita/packages/service/env/systemctl/test.coffee", + "raw.idmap": process.env["NIKITA_LXD_IN_VAGRANT"] + ? "both 1000 0" + : `both ${process.getuid()} 0`, + }, + disk: { + nikitadir: { + path: "/nikita", + source: + process.env["NIKITA_HOME"] || path.join(dirname, "../../../../"), + }, + }, + ssh: { + enabled: true, + }, + }, + }, + provision_container: async function ({ config }) { + await this.lxc.exec({ + $header: "Node.js", + container: config.container, + command: dedent` + if command -v node ; then exit 42; fi + dnf install -y tar + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . ~/.bashrc + nvm install 16 + `, + trap: true, + code: [0, 42], + }); + await this.lxc.exec({ + $header: 'SSH keys', + container: config.container, + command: dedent` + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! 
-f /root/.ssh/id_ed25519 ]; then + ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' + cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys + fi + `, + trap: true + }); + }, + }, +}); diff --git a/packages/service/env/systemctl/test.coffee b/packages/service/env/systemctl/test.coffee index 64a7d3010..237a9337d 100644 --- a/packages/service/env/systemctl/test.coffee +++ b/packages/service/env/systemctl/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: service_install: true service_startup: true @@ -13,8 +13,8 @@ module.exports = sudo: true , label: 'remote' - sudo: true + # sudo: true ssh: - host: '127.0.0.1', username: process.env.USER, - private_key_path: '~/.ssh/id_rsa' + host: '127.0.0.1', username: 'root', + private_key_path: '~/.ssh/id_ed25519' ] diff --git a/packages/service/env/ubuntu-1404-outdated/docker-compose.yml b/packages/service/env/ubuntu-1404-outdated/docker-compose.yml new file mode 100644 index 000000000..51309a733 --- /dev/null +++ b/packages/service/env/ubuntu-1404-outdated/docker-compose.yml @@ -0,0 +1,21 @@ + +version: '3' +services: + nodejs: + build: + context: . + dockerfile: ./nodejs/Dockerfile + image: nikita_service_ubuntu_nodejs + container_name: nikita_service_ubuntu_outdated_nodejs + depends_on: + - target + volumes: + - ../../../../:/nikita + environment: + NIKITA_TEST_MODULE: /nikita/packages/service/env/ubuntu-1404-outdated/test.coffee + target: + build: + context: . + dockerfile: ./target/Dockerfile + image: nikita_service_ubuntu_outdated_target + container_name: nikita_service_ubuntu_outdated_target diff --git a/packages/service/env/ubuntu-1404-outdated/entrypoint.sh b/packages/service/env/ubuntu-1404-outdated/entrypoint.sh new file mode 100755 index 000000000..b398013e7 --- /dev/null +++ b/packages/service/env/ubuntu-1404-outdated/entrypoint.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +# Source Node.js +. ~/.bashrc +# We have TTY, so probably an interactive container... 
+if test -t 0; then + if [[ $@ ]]; then + # Transfer arguments to mocha + node_modules/.bin/mocha $@ + else + # Run bash when no argument + export PS1='[\u@\h : \w]\$ ' + /bin/bash + fi +# Detached mode +else + npm run test:local +fi diff --git a/packages/service/env/ubuntu-1404-outdated/nodejs/Dockerfile b/packages/service/env/ubuntu-1404-outdated/nodejs/Dockerfile new file mode 100644 index 000000000..f284e2039 --- /dev/null +++ b/packages/service/env/ubuntu-1404-outdated/nodejs/Dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +ARG DEBIAN_FRONTEND=nonintercative +RUN \ + apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping + +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/service + +# User +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita +USER nikita + +# Install Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +# Note, bashrc not sourced unless running interactively +ENV PATH /home/nikita/n/bin:$PATH + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/service/env/ubuntu-1404-outdated/run.sh b/packages/service/env/ubuntu-1404-outdated/run.sh new file mode 100755 index 000000000..3eaa8bd58 --- /dev/null +++ b/packages/service/env/ubuntu-1404-outdated/run.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +cd `pwd`/`dirname ${BASH_SOURCE}` +docker compose up --abort-on-container-exit diff --git a/packages/service/env/ubuntu-1404-outdated/target/Dockerfile b/packages/service/env/ubuntu-1404-outdated/target/Dockerfile new file mode 100644 index 000000000..6cc559201 --- /dev/null +++ b/packages/service/env/ubuntu-1404-outdated/target/Dockerfile @@ -0,0 +1,21 @@ +FROM ubuntu:14.04.5 +LABEL org.opencontainers.image.authors="David Worms " + +RUN \ + apt update && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd + +# Sudo User +RUN useradd nikita -d /home/nikita && \ + hash=$(echo "secret" | openssl passwd -1 -stdin) && \ + usermod --pass="$hash" nikita && \ + mkdir -p /home/nikita && \ + mkdir -p /home/nikita/.ssh && \ + chown -R nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + +ENTRYPOINT ["sudo", "/usr/sbin/sshd", "-D"] diff --git a/packages/service/env/ubuntu-1404-outdated/test.coffee b/packages/service/env/ubuntu-1404-outdated/test.coffee new file mode 100644 index 000000000..14c0b17b2 --- /dev/null +++ b/packages/service/env/ubuntu-1404-outdated/test.coffee @@ -0,0 +1,17 @@ + +export default + tags: + service_install: false + service_outdated: true + service_startup: false + service_systemctl: false + service: + name: 'apt' + config: [ + label: 'remote' + sudo: true + ssh: + host: 'target' + username: 'nikita' + password: 'secret' + ] diff --git a/packages/service/env/ubuntu-1404/docker-compose.yml b/packages/service/env/ubuntu-1404/docker-compose.yml new file mode 100644 index 000000000..ae9c9e7ab --- /dev/null +++ b/packages/service/env/ubuntu-1404/docker-compose.yml @@ -0,0 +1,21 @@ + +version: '3' +services: + nodejs: + build: + context: . 
+ dockerfile: ./nodejs/Dockerfile + image: nikita_service_ubuntu_nodejs + container_name: nikita_service_ubuntu_nodejs + depends_on: + - target + volumes: + - ../../../../:/nikita + environment: + NIKITA_TEST_MODULE: /nikita/packages/service/env/ubuntu-1404/test.coffee + target: + build: + context: . + dockerfile: ./target/Dockerfile + image: nikita_service_ubuntu_target + container_name: nikita_service_ubuntu_target diff --git a/packages/service/env/ubuntu-1404/entrypoint.sh b/packages/service/env/ubuntu-1404/entrypoint.sh new file mode 100755 index 000000000..b398013e7 --- /dev/null +++ b/packages/service/env/ubuntu-1404/entrypoint.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +# Source Node.js +. ~/.bashrc +# We have TTY, so probably an interactive container... +if test -t 0; then + if [[ $@ ]]; then + # Transfer arguments to mocha + node_modules/.bin/mocha $@ + else + # Run bash when no argument + export PS1='[\u@\h : \w]\$ ' + /bin/bash + fi +# Detached mode +else + npm run test:local +fi diff --git a/packages/service/env/ubuntu-1404/nodejs/Dockerfile b/packages/service/env/ubuntu-1404/nodejs/Dockerfile new file mode 100644 index 000000000..16e5ab647 --- /dev/null +++ b/packages/service/env/ubuntu-1404/nodejs/Dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +ARG DEBIAN_FRONTEND=nonintercative +RUN \ + apt update && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping + +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/service + +# User +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita +USER nikita + +# Install Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +# Note, bashrc not sourced unless running interactively +ENV PATH /home/nikita/n/bin:$PATH + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/service/env/ubuntu-1404/run.sh b/packages/service/env/ubuntu-1404/run.sh new file mode 100755 index 000000000..3eaa8bd58 --- /dev/null +++ b/packages/service/env/ubuntu-1404/run.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +cd `pwd`/`dirname ${BASH_SOURCE}` +docker compose up --abort-on-container-exit diff --git a/packages/service/env/ubuntu-1404/target/Dockerfile b/packages/service/env/ubuntu-1404/target/Dockerfile new file mode 100644 index 000000000..1c17acfca --- /dev/null +++ b/packages/service/env/ubuntu-1404/target/Dockerfile @@ -0,0 +1,21 @@ +FROM ubuntu:14.04 +LABEL org.opencontainers.image.authors="David Worms " + +RUN \ + apt update && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd + +# Sudo User +RUN useradd nikita -d /home/nikita && \ + hash=$(echo "secret" | openssl passwd -1 -stdin) && \ + usermod --pass="$hash" nikita && \ + mkdir -p /home/nikita && \ + mkdir -p /home/nikita/.ssh && \ + chown -R nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + +ENTRYPOINT ["sudo", "/usr/sbin/sshd", "-D"] diff --git a/packages/service/env/ubuntu/test.coffee b/packages/service/env/ubuntu-1404/test.coffee similarity index 60% rename from packages/service/env/ubuntu/test.coffee rename to packages/service/env/ubuntu-1404/test.coffee index 5feb9421e..b299532fa 100644 --- a/packages/service/env/ubuntu/test.coffee +++ b/packages/service/env/ubuntu-1404/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export 
default tags: service_install: true service_startup: true @@ -9,12 +9,10 @@ module.exports = srv_name: 'nginx' chk_name: 'nginx' config: [ - label: 'local' - sudo: true - , label: 'remote' sudo: true ssh: - host: '127.0.0.1', username: process.env.USER, - private_key_path: '~/.ssh/id_ed25519' + host: 'target' + username: 'nikita' + password: 'secret' ] diff --git a/packages/service/env/ubuntu/Dockerfile b/packages/service/env/ubuntu/Dockerfile deleted file mode 100644 index 5757345c3..000000000 --- a/packages/service/env/ubuntu/Dockerfile +++ /dev/null @@ -1,37 +0,0 @@ -FROM ubuntu:trusty -MAINTAINER David Worms - -RUN \ - apt update -y && \ - # Install Node.js dependencies - apt install -y build-essential curl git && \ - # Install SSH and sudo - apt-get install -y openssh-server sudo && \ - ssh-keygen -A && \ - mkdir -p /run/sshd - -ADD ./entrypoint.sh /entrypoint.sh -RUN mkdir -p /nikita -WORKDIR /nikita/packages/service - -# Sudo User -RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita -USER nikita - -# Install Node.js -ENV NODE_VERSION stable -RUN \ - curl -L https://git.io/n-install | bash -s -- -y -# Note, bashrc not sourced unless running interactively -# RUN . ~/.bashrc && n $NODE_VERSION -ENV PATH /home/nikita/n/bin:$PATH - -RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys - -ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/service/env/ubuntu/docker-compose.yml b/packages/service/env/ubuntu/docker-compose.yml deleted file mode 100644 index fe1372a6e..000000000 --- a/packages/service/env/ubuntu/docker-compose.yml +++ /dev/null @@ -1,11 +0,0 @@ - -version: '3' -services: - nodejs: - build: . 
- image: nikita_service_ubuntu - container_name: nikita_service_ubuntu - volumes: - - ../../../../:/nikita - environment: - NIKITA_TEST_MODULE: /nikita/packages/service/env/ubuntu/test.coffee diff --git a/packages/service/lib/assert/index.js b/packages/service/lib/assert/index.js index dc33bafe8..fb5cf68ef 100644 --- a/packages/service/lib/assert/index.js +++ b/packages/service/lib/assert/index.js @@ -1,9 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import utils from '@nikitajs/core/utils' +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.srv_name == null) { config.srv_name = config.name; @@ -11,29 +12,16 @@ module.exports = { config.name = [config.name]; // Assert a Package is installed if (config.installed != null) { - try { - await this.execute({ - $shy: true, - command: dedent` - if command -v yum >/dev/null 2>&1; then - rpm -qa --qf "%{NAME}\n" | grep '^${config.name.join('|')}$' - elif command -v pacman >/dev/null 2>&1; then - pacman -Qqe | grep '^${config.name.join('|')}$' - elif command -v apt-get >/dev/null 2>&1; then - dpkg -l | grep \'^ii\' | awk \'{print $2}\' | grep '^${config.name.join('|')}$' - else - echo "Unsupported Package Manager" >&2 - exit 2 - fi - `, - stdin_log: true, - stdout_log: false - }); - } catch (error) { - if (error.exit_code === 2) { - throw Error("Unsupported Package Manager"); - } - throw Error(`Uninstalled Package: ${config.name}`); + const {packages} = await this.service.installed(); + const notInstalled = config.name.filter( pck => !packages.includes(pck)); + if (notInstalled.length) { + throw utils.error("NIKITA_SERVICE_ASSERT_NOT_INSTALLED", [ + notInstalled.length > 1 + ? `services ${notInstalled + .map(JSON.stringify) + .join(", ")} are not installed.` + : `service ${JSON.stringify(notInstalled[0])} is not installed.`, + ]); } } // Assert a Service is started or stopped diff --git a/packages/service/lib/assert/schema.json b/packages/service/lib/assert/schema.json index 699c669b2..88f91148f 100644 --- a/packages/service/lib/assert/schema.json +++ b/packages/service/lib/assert/schema.json @@ -7,7 +7,7 @@ "description": "Assert the package is installed." 
}, "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" }, "srv_name": { "type": "string", diff --git a/packages/service/lib/discover/index.js b/packages/service/lib/discover/index.js index a50f0c265..e7b241672 100644 --- a/packages/service/lib/discover/index.js +++ b/packages/service/lib/discover/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, parent: { state } }) { if (state["nikita:service:loader"] != null) { return; diff --git a/packages/service/lib/index.js b/packages/service/lib/index.js index cca3386fb..e1be28dfb 100644 --- a/packages/service/lib/index.js +++ b/packages/service/lib/index.js @@ -1,9 +1,9 @@ // Dependencies -const {merge} = require('mixme'); -const definitions = require("./schema.json"); +import {merge} from 'mixme'; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config, parent, state}) { const pkgname = config.yum_name || config.name; const chkname = config.chk_name || config.srv_name || config.name; @@ -27,21 +27,21 @@ module.exports = { }); } if (config.state) { - const {$status} = await this.service.status({ + const {started} = await this.service.status({ $shy: true, name: srvname }); - if (!$status && config.state.includes('started')) { + if (!started && config.state.includes('started')) { await this.service.start({ name: srvname }); } - if ($status && config.state.includes('stopped') >= 0) { + if (started && config.state.includes('stopped')) { await this.service.stop({ name: srvname }); } - if ($status && config.state.includes('restarted') >= 0) { + if (started && config.state.includes('restarted')) { await this.service.restart({ name: srvname }); diff --git a/packages/service/lib/init/index.js b/packages/service/lib/init/index.js index 03ff15c79..93c44fb57 100644 --- a/packages/service/lib/init/index.js +++ b/packages/service/lib/init/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { path } }) { // check if file is target is directory // detect daemon loader provider to construct target @@ -15,36 +15,35 @@ module.exports = { if (config.target == null) { config.target = `/etc/init.d/${config.name}`; } - const { loader } = await this.service.discover({}); + const { loader } = await this.service.discover(); if (config.loader == null) { config.loader = loader; } // discover loader to put in cache - await this.file.render({ - target: config.target, - source: config.source, - mode: config.mode, - uid: config.uid, - gid: config.gid, + const args = { backup: config.backup, + content: config.content, context: config.context, - local: config.local, engine: config.engine, - }); - if (config.loader !== "systemctl") { - return; + gid: config.gid, + local: config.local, + mode: config.mode, + source: config.source, + target: config.target, + uid: config.uid, } - const { $status } = await this.execute({ - $shy: true, - command: `systemctl status ${config.name} 2>\&1 | egrep '(Reason: No such file or directory)|(Unit 
${config.name}.service could not be found)|(${config.name}.service changed on disk)'`, - code: [0, 1], - }); - if (!$status) { - return; + await (config.context ? this.file.render(args) : this.file(args)); + if (config.loader === "systemctl") { + const reload = await this.execute({ + $shy: true, + command: `systemctl status ${config.name} 2>\&1 | egrep '(Reason: No such file or directory)|(Unit ${config.name}.service could not be found)|(${config.name}.service changed on disk)'`, + code: [0, 1], + }).then(({ $status }) => $status); + await this.execute({ + $if: reload, + command: "systemctl daemon-reload; systemctl reset-failed", + }); } - return await this.execute({ - command: "systemctl daemon-reload; systemctl reset-failed", - }); }, metadata: { definitions: definitions, diff --git a/packages/service/lib/init/schema.json b/packages/service/lib/init/schema.json index de01f660b..fcf853870 100644 --- a/packages/service/lib/init/schema.json +++ b/packages/service/lib/init/schema.json @@ -3,28 +3,27 @@ "type": "object", "properties": { "backup": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/backup" + "$ref": "module://@nikitajs/file#/definitions/config/properties/backup" }, "context": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/context", - "default": {}, + "$ref": "module://@nikitajs/file#/definitions/config/properties/context", "description": "The context object used to render the scripts file; templating is\ndisabled if no context is provided." }, "engine": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/engine" + "$ref": "module://@nikitajs/file#/definitions/config/properties/engine" }, "filters": { "typeof": "function", "description": "Filter function to extend the nunjucks engine." }, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "local": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/local" + "$ref": "module://@nikitajs/file#/definitions/config/properties/local" }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode", + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode", "default": "755" }, "name": { @@ -32,14 +31,14 @@ "description": "The name of the destination file. Uses the name of the template if\nmissing." }, "source": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/source" + "$ref": "module://@nikitajs/file#/definitions/config/properties/source" }, "target": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/target", + "$ref": "module://@nikitajs/file#/definitions/config/properties/target", "description": "The destination file. `/etc/init.d/crond` or\n`/etc/systemd/system/crond.service` for example. If no provided,\nnikita put it on the default folder based on the service daemon\nprovider,the OS and use the source filename as the name." 
}, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" } }, "required": [ diff --git a/packages/service/lib/install/index.js b/packages/service/lib/install/index.js index e34f99ab7..caf0c5dce 100644 --- a/packages/service/lib/install/index.js +++ b/packages/service/lib/install/index.js @@ -1,22 +1,15 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, parent: { state }, tools: { log } }) { - if (config.cache) { - // Config - if (config.installed == null) { - config.installed = state["nikita:execute:installed"]; - } - } - if (config.cache) { - if (config.outdated == null) { - config.outdated = state["nikita:execute:outdated"]; - } - } + let packages = { + // installed: config.cache ? state["nikita:service:packages:installed"] : undefined, + // installed: config.cache ? state["nikita:service:packages:outdated"] : undefined, + }; const cacheonly = config.cacheonly ? "-C" : ""; for (const i in config.pacman_flags) { const flag = config.pacman_flags[i]; @@ -43,86 +36,23 @@ module.exports = { } } // Start real work - log({ - message: `Install service ${config.name}`, - level: "INFO", - }); + log("INFO", `Install service ${config.name}`); // List installed packages - if (config.installed == null) { - try { - const { $status, stdout } = await this.execute({ - $shy: true, - command: dedent` - if command -v yum >/dev/null 2>&1; then - rpm -qa --qf "%{NAME}\n" - elif command -v pacman >/dev/null 2>&1; then - pacman -Qqe - elif command -v apt-get >/dev/null 2>&1; then - dpkg -l | grep \'^ii\' | awk \'{print $2}\' - else - echo "Unsupported Package Manager" >&2 - exit 2 - fi - `, - code: [0, 1], - stdin_log: false, - stdout_log: false, - }); - if ($status) { - log({ - message: "Installed packages retrieved", - level: "INFO", - }); - config.installed = utils.string.lines(stdout) - } - } catch (error) { - if (error.exit_code === 2) { - throw Error("Unsupported Package Manager"); - } - throw error - } - } + const installed = packages.installed + ? packages.installed.includes(config.name) + : await this.service + .installed(config.name) + .then(({ installed }) => installed); // List packages waiting for update - if (config.outdated == null) { - try { - const { $status, stdout } = await this.execute({ - $shy: true, - command: ` - if command -v yum >/dev/null 2>&1; then - yum ${cacheonly} check-update -q | sed 's/\\([^\\.]*\\).*/\\1/' - elif command -v pacman >/dev/null 2>&1; then - pacman -Qu | sed 's/\\([^ ]*\\).*/\\1/' - elif command -v apt-get >/dev/null 2>&1; then - apt-get -u upgrade --assume-no | grep '^\\s' | sed 's/\\s/\\n/g' - else - echo "Unsupported Package Manager" >&2 - exit 2 - fi - `, - code: [0, 1], - stdin_log: false, - stdout_log: false, - }); - if ($status) { - log({ - message: "Outdated package list retrieved", - level: "INFO", - }); - config.outdated = utils.string.lines(stdout.trim()); - } else { - config.outdated = []; - } - } catch (error) { - if (error.exit_code === 2) { - throw Error("Unsupported Package Manager"); - } - throw error; - } - } + const outdated = packages.outdated + ? 
packages.outdated.includes(config.name) + : await this.service + .outdated(config.name, { cacheonly: config.cacheonly }) + .then(({ outdated }) => outdated); // Install the package - if (!config.installed.includes(config.name) || config.outdated.includes(config.name)) { + if (!installed || outdated) { try { - const { $status } = await this.execute({ + await this.execute({ command: dedent` if command -v yum >/dev/null 2>&1; then yum install -y ${cacheonly} ${config.name} @@ -139,28 +69,7 @@ module.exports = { `, code: config.code, }); - log( - $status - ? { - message: `Package \"${config.name}\" is installed`, - level: "WARN", - } - : { - message: `Package \"${config.name}\" is already installed`, - level: "INFO", - } - ); - // Enrich installed array with package name unless already there - if (config.installed.includes(config.name)) { - config.installed.push(config.name); - } - // Remove package name from outdated if listed - if (config.outdated) { - const outdatedIndex = config.outdated.indexOf(config.name); - if (outdatedIndex !== -1) { - config.outdated.splice(outdatedIndex, 1); - } - } + log("WARN", `Package \"${config.name}\" is installed`); } catch (error) { if (error.exit_code === 2) { throw Error( @@ -169,27 +78,22 @@ module.exports = { } throw utils.error( "NIKITA_SERVICE_INSTALL", - ["failed to install package,", `name is \`${config.name}\``], - { - target: config.target, - } + ["failed to install package,", `name is ${JSON.stringify(config.name)}`], ); } } + // Enrich installed array with package name unless already there if (config.cache) { - log({ - message: 'Caching installed on "nikita:execute:installed"', - level: "INFO", - }); - state["nikita:execute:installed"] = config.installed; - log({ - message: 'Caching outdated list on "nikita:execute:outdated"', - level: "INFO", - }); - state["nikita:execute:outdated"] = config.outdated; - return { - $status: true, - }; + // if(!installed) { + // log("DEBUG", 'Update installed packages cache.'); + // state["nikita:service:packages:installed"].push(config.name); + // } + // if(oudated) { + // log("DEBUG", 'Update outdated packages cache.'); + // let pcks = packages.outdated; + // pcks = pcks.splice(pcks.indexOf(config.name), 1); + // state["nikita:service:packages:outdated"] = pcks; + // } } }, metadata: { diff --git a/packages/service/lib/install/schema.json b/packages/service/lib/install/schema.json index 225a38e38..06f6a4a4d 100644 --- a/packages/service/lib/install/schema.json +++ b/packages/service/lib/install/schema.json @@ -4,34 +4,20 @@ "properties": { "cache": { "type": "boolean", - "description": "Cache the list of installed and outdated packages." + "description": "TODO, not yet implemented. Cache the list of installed and outdated packages." }, "cacheonly": { "type": "boolean", "description": "Run the yum command entirely from system cache, don't update cache." }, "code": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/code", + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/code", "description": "Error code applied when using nikita.service." }, - "installed": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Cache a list of installed services. If an array, the service will be\ninstalled if a key of the same name exists; if anything else\n(default), no caching will take place." - }, "name": { "type": "string", "description": "Package name, required unless provided as main argument." 
}, - "outdated": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Cache a list of outdated services. If an array, the service will be\nupdated if a key of the same name exists; If true, the option will be\nconverted to an array with all the outdated service names as keys; if\nanything else (default), no caching will take place." - }, "pacman_flags": { "type": "array", "default": [], diff --git a/packages/service/lib/installed/index.js b/packages/service/lib/installed/index.js new file mode 100644 index 000000000..7411c5622 --- /dev/null +++ b/packages/service/lib/installed/index.js @@ -0,0 +1,65 @@ +// Dependencies +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; + +// Action +export default { + handler: async function ({ config, parent: { state }, tools: { log } }) { + let packages = config.cache ? state["nikita:service:packages:installed"] : undefined; + if (packages !== undefined) { + return { + packages: packages, + } + } + try { + ({ data: packages } = await this.execute({ + $shy: true, + command: dedent` + if command -v rpm >/dev/null 2>&1; then + rpm -qa --qf "%{NAME}\n" + elif command -v pacman >/dev/null 2>&1; then + pacman -Qqe + elif command -v apt >/dev/null 2>&1; then + dpkg -l | grep '^ii' | awk '{print $2}' + else + echo "Unsupported Package Manager" >&2 + exit 2 + fi + `, + // code: [0, 1], + format: ({stdout}) => utils.string.lines(stdout), + stdout_log: false, + })); + log("INFO", "Installed packages retrieved"); + } catch (error) { + if (error.exit_code === 2) { + throw utils.error( + "NIKITA_SERVICE_INSTALLED_UNSUPPORTED_PACKAGE_MANAGER", + "at the moment, rpm (yum, dnf, ...), pacman and dpkg (apt, apt-get, ...) are supported." + ); + } + throw error; + } + if (config.cache) { + log("INFO", 'Caching installed packages.'); + state["nikita:service:packages:installed"] = packages; + } + if(config.name) { + return { + installed: packages.includes(config.name) + } + }else{ + return { + packages: packages, + } + } + }, + metadata: { + argument_to_config: "name", + definitions: definitions, + metadata: { + shy: true + } + }, +}; diff --git a/packages/service/lib/installed/schema.json b/packages/service/lib/installed/schema.json new file mode 100644 index 000000000..225f21a42 --- /dev/null +++ b/packages/service/lib/installed/schema.json @@ -0,0 +1,13 @@ +{ + "config": { + "type": "object", + "properties": { + "cache": { + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/cache" + }, + "name": { + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" + } + } + } +} diff --git a/packages/service/lib/outdated/index.js b/packages/service/lib/outdated/index.js new file mode 100644 index 000000000..619c19763 --- /dev/null +++ b/packages/service/lib/outdated/index.js @@ -0,0 +1,82 @@ +// Dependencies +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; + +// Action +export default { + handler: async function ({ metadata, config, parent: { state }, tools: { log } }) { + let packages = config.cache ? state["nikita:service:packages:outdated"] : undefined; + if (packages !== undefined) { + return { + packages: packages, + } + } + const cacheonly = config.cacheonly ? "-C" : ""; + try { + // Error inside the pipeline are not catched (eg no sudo permission). + // A possible solution includes breaking the pipeline into multiple calls. 
+ // Another solution bash-only alternative is to use `set -o pipeline`. + // See https://www.gnu.org/savannah-checkouts/gnu/bash/manual/bash.html + // The apt-get is pretty weak, `apt-get -s -u upgrade` can be executed + // by non-root users but its output will add fake packages to the list + // due to the presence of indented comments. + ({ data: packages } = await this.execute({ + $shy: true, + command: dedent` + if command -v yum >/dev/null 2>&1; then + yum ${cacheonly} check-update -q | sed 's/\\([^\\.]*\\).*/\\1/' + elif command -v pacman >/dev/null 2>&1; then + pacman -Qu | sed 's/\\([^ ]*\\).*/\\1/' + elif command -v apt-get >/dev/null 2>&1; then + apt-get -s -u upgrade | grep '^\s' | sed 's/\\s/\\n/g' + else + echo "Unsupported Package Manager" >&2 + exit 2 + fi + `, + format: ({ stdout }) => + utils.array.flatten( + utils.string + .lines(stdout) + .map((line) => line.split(" ").map(pck => pck.trim()).filter(pck => pck !== '')) + ), + stdout_log: false, + })); + log("INFO", "Outdated packages retrieved"); + } catch (error) { + if (error.exit_code === 43) { + throw utils.error( + "NIKITA_SERVICE_OUTDATED_UNSUPPORTED_PACKAGE_MANAGER", + "at the moment, rpm (yum, dnf, ...), pacman and dpkg (apt, apt-get, ...) are supported." + ); + } else if (error.exit_code === 100) { + throw utils.error( + "NIKITA_SERVICE_OUTDATED_SUDO", + "permission denied, maybe run this command as sudoer or with the `$debug` configuration." + ); + } + throw error; + } + if (config.cache) { + log("INFO", 'Caching outdated packages.'); + state["nikita:service:packages:outdated"] = packages; + } + if(config.name) { + return { + outdated: packages.includes(config.name) + } + }else{ + return { + packages: packages, + } + } + }, + metadata: { + argument_to_config: "name", + definitions: definitions, + metadata: { + shy: true + } + }, +}; diff --git a/packages/service/lib/outdated/schema.json b/packages/service/lib/outdated/schema.json new file mode 100644 index 000000000..de9d92a1a --- /dev/null +++ b/packages/service/lib/outdated/schema.json @@ -0,0 +1,13 @@ +{ + "config": { + "type": "object", + "properties": { + "name": { + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" + }, + "cacheonly": { + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/cacheonly" + } + } + } +} diff --git a/packages/service/lib/register.js b/packages/service/lib/register.js index 5aa4a8c28..247415910 100644 --- a/packages/service/lib/register.js +++ b/packages/service/lib/register.js @@ -1,29 +1,25 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); +import registry from "@nikitajs/core/registry"; +import "@nikitajs/file/register"; // Action registration -module.exports = { +const actions = { service: { - '': '@nikitajs/service/lib', - assert: '@nikitajs/service/lib/assert', - discover: '@nikitajs/service/lib/discover', - install: '@nikitajs/service/lib/install', - init: '@nikitajs/service/lib/init', - remove: '@nikitajs/service/lib/remove', - restart: '@nikitajs/service/lib/restart', - start: '@nikitajs/service/lib/start', - startup: '@nikitajs/service/lib/startup', - status: '@nikitajs/service/lib/status', - stop: '@nikitajs/service/lib/stop' + '': '@nikitajs/service', + assert: '@nikitajs/service/assert', + discover: '@nikitajs/service/discover', + install: '@nikitajs/service/install', + installed: '@nikitajs/service/installed', + init: '@nikitajs/service/init', + outdated: '@nikitajs/service/outdated', + remove: '@nikitajs/service/remove', + restart: 
'@nikitajs/service/restart', + start: '@nikitajs/service/start', + startup: '@nikitajs/service/startup', + status: '@nikitajs/service/status', + stop: '@nikitajs/service/stop' } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/service/lib/remove/index.js b/packages/service/lib/remove/index.js index 7ef3768fb..c3546aa59 100644 --- a/packages/service/lib/remove/index.js +++ b/packages/service/lib/remove/index.js @@ -1,89 +1,59 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('@nikitajs/core/lib/utils'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import utils from "@nikitajs/core/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, parent: { state }, tools: { log } }) { - log({ - message: `Remove service ${config.name}`, - level: "INFO", - }); - const cacheonly = config.cacheonly ? "-C" : ""; - let installed = config.cache ? state["nikita:execute:installed"] : null; - if (installed === null) { - try { - const { stdout } = await this.execute({ - $shy: true, - command: dedent` - if command -v yum >/dev/null 2>&1; then - rpm -qa --qf "%{NAME}\n" - elif command -v pacman >/dev/null 2>&1; then - pacman -Qqe - elif command -v apt-get >/dev/null 2>&1; then - dpkg -l | grep \'^ii\' | awk \'{print $2}\' - else - echo "Unsupported Package Manager" >&2 - exit 2 - fi - `, - code: [0, 1], - stdout_log: false, - }); - log({ - message: "Installed packages retrieved", - level: "INFO", - }); - installed = utils.string.lines(stdout); - } catch (error) { - if (error.exit_code === 2) { - throw Error("Unsupported Package Manager"); - } - throw error; - } + const { installed } = await this.service.installed(config.name); + if (!installed) { + log("INFO", `Service ${config.name} not installed`); + return false; } - if (installed.includes(config.name)) { - try { - const { $status } = await this.execute({ - command: ` - if command -v yum >/dev/null 2>&1; then - yum remove -y ${cacheonly} '${config.name}' - elif command -v pacman >/dev/null 2>&1; then - pacman --noconfirm -R ${config.name} - elif command -v apt-get >/dev/null 2>&1; then - apt-get remove -y ${config.name} - else - echo "Unsupported Package Manager: yum, pacman, apt-get supported" >&2 - exit 2 - fi - `, - code: [0, 3], - }); - // Update list of installed packages - installed.splice(installed.indexOf(config.name), 1); - // Log information - log( - $status - ? { - message: "Service removed", - level: "WARN", - } - : { - message: "Service already removed", - level: "INFO", - } - ); - } catch (error) { - throw Error(`Invalid Service Name: ${config.name}`); - } + try { + log("INFO", `Remove service ${config.name}`); + const cacheonly = config.cacheonly ? "-C" : ""; + const { $status } = await this.execute({ + command: ` + if command -v yum >/dev/null 2>&1; then + yum remove -y ${cacheonly} '${config.name}' + elif command -v pacman >/dev/null 2>&1; then + pacman --noconfirm -R ${config.name} + elif command -v apt-get >/dev/null 2>&1; then + apt-get remove -y ${config.name} + else + echo "Unsupported Package Manager: yum, pacman, apt-get supported" >&2 + exit 2 + fi + `, + code: [0, 3], + }); + // Log information + log( + $status + ? 
{ + message: "Service removed", + level: "WARN", + } + : { + message: "Service already removed", + level: "INFO", + } + ); + } catch (error) { + throw utils.error( + "NIKITA_SERVICE_REMOVE_INVALID_SERVICE", + `Invalid Service Name: ${config.name}` + ); } if (config.cache) { - log({ - message: 'Caching installed on "nikita:execute:installed"', - level: "INFO", - }); - state["nikita:execute:installed"] = installed; + log("INFO", 'Remove package from cache key in "nikita:service:packages:installed"'); + const packages = state["nikita:service:packages"]; + state["nikita:service:packages:installed"] = packages.splice( + packages.indexOf(config.name), + 1 + ); } }, metadata: { diff --git a/packages/service/lib/remove/schema.json b/packages/service/lib/remove/schema.json index 346c8b3e0..101e9aa1f 100644 --- a/packages/service/lib/remove/schema.json +++ b/packages/service/lib/remove/schema.json @@ -7,10 +7,10 @@ "description": "Run entirely from system cache to list installed and outdated\npackages." }, "cacheonly": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/cacheonly" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/cacheonly" }, "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" } }, "required": [ diff --git a/packages/service/lib/restart/index.js b/packages/service/lib/restart/index.js index 6378a90e3..6de09a66d 100644 --- a/packages/service/lib/restart/index.js +++ b/packages/service/lib/restart/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, parent: { state }, tools: { log } }) { log({ message: `Restart service ${config.name}`, diff --git a/packages/service/lib/restart/schema.json b/packages/service/lib/restart/schema.json index fd978290b..bde8d634f 100644 --- a/packages/service/lib/restart/schema.json +++ b/packages/service/lib/restart/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" } }, "required": [ diff --git a/packages/service/lib/schema.json b/packages/service/lib/schema.json index b8c941b52..a3d0aab02 100644 --- a/packages/service/lib/schema.json +++ b/packages/service/lib/schema.json @@ -3,26 +3,20 @@ "type": "object", "properties": { "cache": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/cacheonly" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/cacheonly" }, "cacheonly": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/cacheonly" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/cacheonly" }, "chk_name": { "type": "string", "description": "Name used by the chkconfig utility, default to \"srv_name\" and \"name\"." 
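Seen from the caller, the rewritten `remove` handler above now short-circuits through `service.installed` and reports through `$status`. A small sketch, the package name being illustrative:

```js
// $status is false when the package was never installed (the handler returns
// early) and true when a removal actually ran, as the remove tests exercise.
const { $status } = await nikita.service.remove({ name: "cronie" });
console.info($status ? "Package removed" : "Nothing to remove");
```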
}, - "installed": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/installed" - }, "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" - }, - "outdated": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/outdated" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" }, "pacman_flags": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/pacman_flags" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/pacman_flags" }, "srv_name": { "type": "string", diff --git a/packages/service/lib/start/index.js b/packages/service/lib/start/index.js index 505b2abc1..222ee26fb 100644 --- a/packages/service/lib/start/index.js +++ b/packages/service/lib/start/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { try { const { $status } = await this.execute({ diff --git a/packages/service/lib/start/schema.json b/packages/service/lib/start/schema.json index fd978290b..bde8d634f 100644 --- a/packages/service/lib/start/schema.json +++ b/packages/service/lib/start/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" } }, "required": [ diff --git a/packages/service/lib/startup/index.js b/packages/service/lib/startup/index.js index 97f8f11ff..e507e94b2 100644 --- a/packages/service/lib/startup/index.js +++ b/packages/service/lib/startup/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { log({ message: `Startup service ${config.name}`, diff --git a/packages/service/lib/startup/schema.json b/packages/service/lib/startup/schema.json index 38c7ea714..c057d0c2e 100644 --- a/packages/service/lib/startup/schema.json +++ b/packages/service/lib/startup/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" }, "startup": { "type": [ diff --git a/packages/service/lib/status/README.md b/packages/service/lib/status/README.md index 4fd9b97bf..1efb5f74a 100644 --- a/packages/service/lib/status/README.md +++ b/packages/service/lib/status/README.md @@ -6,14 +6,14 @@ Note, does not throw an error if service is not installed. ## Output -* `$status` +* `started` Indicates if the startup behavior has changed. ## Example ```js -const {$status} = await nikita.service.status([{ +const {started} = await nikita.service.status([{ name: 'gmetad' }) -console.info(`Service status: ${$status}`) +console.info(`Service status: ${started ? 
'started' : 'stopped'}`) ``` diff --git a/packages/service/lib/status/index.js b/packages/service/lib/status/index.js index ab9bf937e..3e34d7ea8 100644 --- a/packages/service/lib/status/index.js +++ b/packages/service/lib/status/index.js @@ -1,40 +1,37 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { - log({ - message: `Status for service ${config.name}`, - level: "INFO", - }); - try { - const { $status } = await this.execute({ - command: dedent` - ls /lib/systemd/system/*.service /etc/systemd/system/*.service /etc/rc.d/* /etc/init.d/* 2>/dev/null | grep -w "${config.name}" || exit 3 - if command -v systemctl >/dev/null 2>&1; then - systemctl status ${config.name} || exit 3 - elif command -v service >/dev/null 2>&1; then - service ${config.name} status || exit 3 - else - echo "Unsupported Loader" >&2 - exit 2 - fi - `, - code: [0, 3], - }); - log({ - message: `Status for ${config.name} is ${ - $status ? "started" : "stoped" - }`, - level: "INFO", - }); - } catch (error) { + log("INFO", `Status for service ${config.name}`); + const { $status: started } = await this.execute({ + $shy: true, + command: dedent` + ls /lib/systemd/system/*.service /etc/systemd/system/*.service /etc/rc.d/* /etc/init.d/* 2>/dev/null | grep -w "${config.name}" || exit 3 + if command -v systemctl >/dev/null 2>&1; then + systemctl status ${config.name} || exit 3 + elif command -v service >/dev/null 2>&1; then + service ${config.name} status || exit 3 + else + echo "Unsupported Loader" >&2 + exit 2 + fi + `, + code: [0, 3], + }).catch(error => { if (error.exit_code === 2) { throw Error("Unsupported Loader"); } throw error; + }); + log( + "INFO", + `Service ${config.name} is ${started ? 
"started" : "stoped"}.` + ); + return { + started: started } }, metadata: { diff --git a/packages/service/lib/status/schema.json b/packages/service/lib/status/schema.json index fd978290b..bde8d634f 100644 --- a/packages/service/lib/status/schema.json +++ b/packages/service/lib/status/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" } }, "required": [ diff --git a/packages/service/lib/stop/index.js b/packages/service/lib/stop/index.js index 0d56d86e6..c858a3b12 100644 --- a/packages/service/lib/stop/index.js +++ b/packages/service/lib/stop/index.js @@ -1,9 +1,9 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { log({ message: `Stop service ${config.name}`, diff --git a/packages/service/lib/stop/schema.json b/packages/service/lib/stop/schema.json index fd978290b..bde8d634f 100644 --- a/packages/service/lib/stop/schema.json +++ b/packages/service/lib/stop/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "name": { - "$ref": "module://@nikitajs/service/lib/install#/definitions/config/properties/name" + "$ref": "module://@nikitajs/service/install#/definitions/config/properties/name" } }, "required": [ diff --git a/packages/service/package.json b/packages/service/package.json index 966353df0..84f7dfc9e 100644 --- a/packages/service/package.json +++ b/packages/service/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/service", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various service management operations.", "keywords": [ "nikita", @@ -13,7 +14,6 @@ "yum", "pacman" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -58,20 +58,25 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + ".": "./lib/index.js", + "./register": "./lib/register.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/service/lib/register" - ], "inline-diffs": true, - "timeout": 20000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/service/register", + "should" + ], + "throw-deprecation": true, + "timeout": 50000 }, "publishConfig": { "access": "public" @@ -87,5 +92,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/service/test.sample.coffee b/packages/service/test.sample.coffee index 81c8c05f1..9c822ba7b 100644 --- a/packages/service/test.sample.coffee +++ b/packages/service/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: api: true service_install: false @@ -19,5 +19,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/service/test/assert.coffee b/packages/service/test/assert.coffee index 
099e71aeb..23a4c7ca8 100644 --- a/packages/service/test/assert.coffee +++ b/packages/service/test/assert.coffee @@ -1,26 +1,26 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.assert', -> - describe 'installed', -> + describe 'installed.service', -> - @timeout 50000 - return unless tags.service_install + return unless test.tags.service_install they 'succeed if package is installed', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service - name: service.name - @service.assert - name: service.name + await @service.remove + name: test.service.name + await @service + name: test.service.name + await @service.assert + name: test.service.name installed: true they 'fail if package isnt installed', ({ssh, sudo}) -> @@ -28,100 +28,104 @@ describe 'service.assert', -> $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service.assert - name: service.name + await @service.remove + name: test.service.name + await @service.assert + name: test.service.name installed: true .should.be.rejectedWith - message: "Uninstalled Package: #{service.name}" + code: 'NIKITA_SERVICE_ASSERT_NOT_INSTALLED' + message: [ + 'NIKITA_SERVICE_ASSERT_NOT_INSTALLED:' + "service \"#{test.service.name}\" is not installed." + ].join(' ') - describe 'started', -> + describe 'started.systemctl', -> @timeout 50000 - return unless tags.service_systemctl + return unless test.tags.service_systemctl they 'succeed if service is started', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service - name: service.name - @service.start - name: service.srv_name - @service.assert - name: service.srv_name + await @service.remove + name: test.service.name + await @service + name: test.service.name + await @service.start + name: test.service.srv_name + await @service.assert + name: test.service.srv_name started: true - @service.assert - name: service.srv_name + await @service.assert + name: test.service.srv_name started: false .should.be.rejectedWith - message: "Service Started: #{service.srv_name}" + message: "Service Started: #{test.service.srv_name}" they 'fail if service isnt started', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service - name: service.name - @service.stop - name: service.srv_name - @service.assert - name: service.srv_name + await @service.remove + name: test.service.name + await @service + name: test.service.name + await @service.stop + name: test.service.srv_name + await @service.assert + name: test.service.srv_name started: false - @service.assert - name: service.srv_name + await @service.assert + name: test.service.srv_name started: true .should.be.rejectedWith - message: "Service Not Started: #{service.srv_name}" + message: "Service Not Started: #{test.service.srv_name}" - describe 'stopped', -> + describe 'stopped.systemctl', -> @timeout 50000 - return unless tags.service_systemctl + return unless test.tags.service_systemctl they 'succeed if service is started', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service - name: service.name - @service.stop - name: service.srv_name - @service.assert - name: service.srv_name + await @service.remove + name: test.service.name + await @service + name: 
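The converted tests now `await` each action explicitly inside the session body instead of relying on implicit sequencing. The same pattern in plain JavaScript, with an illustrative package name, looks roughly like this:

```js
// The session handler is an async function bound to the nikita instance;
// every action is awaited before the next one runs.
await nikita(async function () {
  await this.service.remove({ name: "cronie" });
  await this.service({ name: "cronie" });
  await this.service.assert({ name: "cronie", installed: true });
});
```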
test.service.name + await @service.stop + name: test.service.srv_name + await @service.assert + name: test.service.srv_name stopped: true - @service.assert - name: service.srv_name + await @service.assert + name: test.service.srv_name stopped: false .should.be.rejectedWith - message: "Service Stopped: #{service.srv_name}" + message: "Service Stopped: #{test.service.srv_name}" they 'fail if service isnt started', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service - name: service.name - @service.start - name: service.srv_name - @service.assert - name: service.srv_name + await @service.remove + name: test.service.name + await @service + name: test.service.name + await @service.start + name: test.service.srv_name + await @service.assert + name: test.service.srv_name stopped: false - @service.assert - name: service.srv_name + await @service.assert + name: test.service.srv_name stopped: true .should.be.rejectedWith - message: "Service Not Stopped: #{service.srv_name}" + message: "Service Not Stopped: #{test.service.srv_name}" diff --git a/packages/service/test/crond-systemd.hbs b/packages/service/test/crond-systemd.hbs deleted file mode 100644 index 30fa7fea3..000000000 --- a/packages/service/test/crond-systemd.hbs +++ /dev/null @@ -1,12 +0,0 @@ -[Unit] -Description={{description}} -After=auditd.service systemd-user-sessions.service time-sync.target - -[Service] -EnvironmentFile=/etc/sysconfig/crond -ExecStart=/usr/sbin/crond -n $CRONDARGS -ExecReload=/bin/kill -HUP $MAINPID -KillMode=process - -[Install] -WantedBy=multi-user.target diff --git a/packages/service/test/crond.hbs b/packages/service/test/crond.hbs deleted file mode 100644 index 55fa23570..000000000 --- a/packages/service/test/crond.hbs +++ /dev/null @@ -1,131 +0,0 @@ -#!/bin/sh -# -# crond Start/Stop the cron clock daemon. -# -# chkconfig: 2345 90 60 -# description: cron is a standard UNIX program that runs user-specified \ -# programs at periodic scheduled times. vixie cron adds a \ -# number of features to the basic UNIX cron, including better \ -# security and more powerful configuration options. - -### BEGIN INIT INFO -# Provides: crond crontab -# Required-Start: $local_fs $syslog -# Required-Stop: $local_fs $syslog -# Default-Start: 2345 -# Default-Stop: 90 -# Short-Description: run cron daemon -# Description: cron is a standard UNIX program that runs user-specified -# programs at periodic scheduled times. vixie cron adds a -# number of features to the basic UNIX cron, including better -# security and more powerful configuration options. -### END INIT INFO - -[ -f /etc/sysconfig/crond ] || { - [ "$1" = "status" ] && exit 4 || exit 6 -} - -RETVAL=0 -prog="crond" -exec=/usr/sbin/crond -lockfile=/var/lock/subsys/crond -config=/etc/sysconfig/crond - -# Source function library. -. /etc/rc.d/init.d/functions - -[ $UID -eq 0 ] && [ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog - -start() { - if [ $UID -ne 0 ] ; then - echo "User has insufficient privilege." - exit 4 - fi - [ -x $exec ] || exit 5 - [ -f $config ] || exit 6 - echo -n $"Starting $prog: " - daemon $prog $CRONDARGS - retval=$? - echo - [ $retval -eq 0 ] && touch $lockfile -} - -stop() { - if [ $UID -ne 0 ] ; then - echo "User has insufficient privilege." - exit 4 - fi - echo -n $"Stopping $prog: " - if [ -n "`pidfileofproc $exec`" ]; then - killproc $exec - RETVAL=3 - else - failure $"Stopping $prog" - fi - retval=$? 
- echo - [ $retval -eq 0 ] && rm -f $lockfile -} - -restart() { - rh_status_q && stop - start -} - -reload() { - echo -n $"Reloading $prog: " - if [ -n "`pidfileofproc $exec`" ]; then - killproc $exec -HUP - else - failure $"Reloading $prog" - fi - retval=$? - echo -} - -force_reload() { - # new configuration takes effect after restart - restart -} - -rh_status() { - # run checks to determine if the service is running or use generic status - status -p /var/run/crond.pid $prog -} - -rh_status_q() { - rh_status >/dev/null 2>&1 -} - - -case "$1" in - start) - rh_status_q && exit 0 - $1 - ;; - stop) - rh_status_q || exit 0 - $1 - ;; - restart) - $1 - ;; - reload) - rh_status_q || exit 7 - $1 - ;; - force-reload) - force_reload - ;; - status) - rh_status - ;; - condrestart|try-restart) - rh_status_q || exit 0 - restart - ;; - *) - echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}" - exit 2 -esac -exit $? diff --git a/packages/service/test/discover.coffee b/packages/service/test/discover.coffee index 7609877a3..ad4441e43 100644 --- a/packages/service/test/discover.coffee +++ b/packages/service/test/discover.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_install +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.discover', -> + return unless test.tags.service_install they 'output loader', ({ssh}) -> nikita diff --git a/packages/service/test/index.config.startup.coffee b/packages/service/test/index.config.startup.coffee index ea71fa532..f56d6ce80 100644 --- a/packages/service/test/index.config.startup.coffee +++ b/packages/service/test/index.config.startup.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_startup or tags.service_systemctl +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service#config.startup', -> + return unless test.tags.service_startup or test.tags.service_systemctl describe 'schema', -> @@ -33,16 +33,16 @@ describe 'service#config.startup', -> $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: true $status.should.be.true() {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: true $status.should.be.false() @@ -51,16 +51,16 @@ describe 'service#config.startup', -> $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: false $status.should.be.true() {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: false $status.should.be.false() @@ -76,15 +76,15 @@ describe 'service#config.startup', -> , -> {$status} = await @execute 'command -v chkconfig', code: [0, 127], $relax: true return unless $status - 
@service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: '235' $status.should.be.true() {$status} = await @service - chk_name: service.chk_name + chk_name: test.service.chk_name startup: '235' $status.should.be.false() @@ -97,14 +97,14 @@ describe 'service#config.startup', -> $sudo: sudo $if_exec: 'command -v chkconfig && ! command -v systemctl' , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: '2345' $status.should.be.true() {$status} = await @service - chk_name: service.chk_name + chk_name: test.service.chk_name startup: '2345' $status.should.be.false() diff --git a/packages/service/test/index.config.state.coffee b/packages/service/test/index.config.state.coffee index a61f4ad73..f2c182254 100644 --- a/packages/service/test/index.config.state.coffee +++ b/packages/service/test/index.config.state.coffee @@ -1,18 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service#config.state', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'fail on invalid state', -> nikita .service - name: service.name - srv_name: service.srv_name + name: test.service.name + srv_name: test.service.srv_name state: 'invalidstate' .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' @@ -23,7 +24,7 @@ describe 'service#config.state', -> 'allowedValues is ["started","stopped","restarted"].' 
].join ' ' - it.only 'requires config `name`, `srv_name` or `chk_name`', -> + it 'requires config `name`, `srv_name` or `chk_name`', -> nikita .service state: 'started' @@ -41,15 +42,15 @@ describe 'service#config.state', -> it 'split multiple states', -> nikita .service - name: service.name - srv_name: service.srv_name + name: test.service.name + srv_name: test.service.srv_name state: 'started,stopped' , ({config}) -> config.state .should.be.fulfilledWith ['started', 'stopped'] describe 'action', -> - return unless tags.service_systemctl + return unless test.tags.service_systemctl @timeout 30000 @@ -57,18 +58,18 @@ describe 'service#config.state', -> nikita $ssh: ssh , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - srv_name: service.srv_name + name: test.service.name + srv_name: test.service.srv_name state: 'started' $status.should.be.true() - {$status} = await @service.status - name: service.srv_name - $status.should.be.true() + {started} = await @service.status + name: test.service.srv_name + started.should.be.true() {$status} = await @service # Detect already started - srv_name: service.srv_name + srv_name: test.service.srv_name state: 'started' $status.should.be.false() @@ -76,18 +77,18 @@ describe 'service#config.state', -> nikita $ssh: ssh , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - srv_name: service.srv_name + name: test.service.name + srv_name: test.service.srv_name state: 'stopped' $status.should.be.true() - {$status} = await @service.status - name: service.srv_name - $status.should.be.false() + {started} = await @service.status + name: test.service.srv_name + started.should.be.false() {$status} = await @service # Detect already stopped - srv_name: service.srv_name + srv_name: test.service.srv_name state: 'stopped' $status.should.be.false() @@ -95,20 +96,20 @@ describe 'service#config.state', -> nikita $ssh: ssh , -> - @service.remove - name: service.name - @service - name: service.name - srv_name: service.srv_name + await @service.remove + name: test.service.name + await @service + name: test.service.name + srv_name: test.service.srv_name state: 'started' {$status} = await @service - srv_name: service.srv_name + srv_name: test.service.srv_name state: 'restarted' $status.should.be.true() - @service.stop - name: service.srv_name + await @service.stop + name: test.service.srv_name {$status} = await @service - srv_name: service.srv_name + srv_name: test.service.srv_name state: 'restarted' $status.should.be.false() @@ -116,15 +117,15 @@ describe 'service#config.state', -> nikita $ssh: ssh , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - srv_name: service.srv_name + name: test.service.name + srv_name: test.service.srv_name state: 'stopped,started,restarted' $status.should.be.true() {$status} = await @service - name: service.name - srv_name: service.srv_name + name: test.service.name + srv_name: test.service.srv_name state: ['stopped', 'started', 'restarted'] $status.should.be.true() diff --git a/packages/service/test/init.coffee b/packages/service/test/init.coffee index ccc880bba..9b99f1a67 100644 --- a/packages/service/test/init.coffee +++ b/packages/service/test/init.coffee @@ -1,88 +1,121 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = 
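The `config.state` tests above rely on the schema coercing a comma-separated string into an array before the handler runs. A hedged sketch of the equivalent call, with illustrative service names:

```js
// "stopped,started,restarted" is normalized to ["stopped", "started",
// "restarted"] and each state is applied in turn; $status reports whether
// any of the transitions changed something.
const { $status } = await nikita.service({
  name: "cronie",
  srv_name: "crond",
  state: "stopped,started,restarted",
});
```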
require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) -return unless tags.service_systemctl +write_srv = ({config}) -> + @file + content: ''' + #!/bin/sh + # Provides: srv + # Description: {{description}} + pid_file=/var/run/nikita_test + start() { + ( status ) && exit 0; + nc -l 13370 1>/dev/null 2>/dev/null & + echo $! > $pid_file + } -describe 'service.init', -> - - @timeout 60000 + stop() { + ( status ) || exit 0; + kill `cat $pid_file` + rm -rf $pid_file + } + status() { + [ ! -e $pid_file ] && exit 1; + pid=`cat $pid_file` + kill -0 $pid + } + case "$1" in + start) $1 ;; + stop) $1 ;; + status) $1 ;; + *) echo $"Usage: $0 {start|stop|status}"; exit 2 + esac + exit $? + ''' + , + config + +describe 'service.init.service', -> + return unless test.tags.service_install - they 'init file with target and source (default)', ({ssh}) -> + they 'init file with target and source (default)', ({ssh, sudo}) -> nikita $ssh: ssh + $sudo: sudo $tmpdir: true , ({metadata: {tmpdir}}) -> - @service.remove 'cronie' - @fs.remove - target: '/etc/init.d/crond' - @service.init - source: "#{__dirname}/crond.hbs" - target: '/etc/init.d/crond' - @fs.assert '/etc/init.d/crond' + await @fs.remove "#{tmpdir}/source/srv" + await @call write_srv, target: "#{tmpdir}/source/srv" + await @service.init + source: "#{tmpdir}/source/srv" + target: '/etc/init.d/srv' + await @fs.assert '/etc/init.d/srv' - they 'init file with source only (default)', ({ssh}) -> + they 'init file with source only (default)', ({ssh, sudo}) -> nikita $ssh: ssh - , -> - @service.remove 'cronie' - @fs.remove - target: '/etc/init.d/crond' - @service.init - source: "#{__dirname}/crond.hbs" - @fs.assert '/etc/init.d/crond' + $sudo: sudo + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @fs.remove '/etc/init.d/srv' + await @call write_srv, target: "#{tmpdir}/source/srv" + await @service.init + source: "#{tmpdir}/source/srv" + await @fs.assert '/etc/init.d/srv' - they 'init file with source and name (default)', ({ssh}) -> + they 'init file with source and name (default)', ({ssh, sudo}) -> nikita $ssh: ssh - , -> - @service.remove 'cronie' - @fs.remove - target: '/etc/init.d/crond' - @service.init - source: "#{__dirname}/crond.hbs" - name: 'crond-name' - @fs.assert '/etc/init.d/crond-name' - - describe 'daemon-reload', -> + $sudo: sudo + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @fs.remove '/etc/init.d/srv_new' + await @call write_srv, target: "#{tmpdir}/source/srv" + await @service.init + source: "#{tmpdir}/source/srv" + name: 'srv_new' + await @fs.assert '/etc/init.d/srv_new' - they 'with systemctl sysv-generator', ({ssh}) -> - nikita - $ssh: ssh - $if_os: name: ['redhat','centos'], version: '7' - , -> - @service.remove 'cronie' - @service.install 'cronie' - @fs.remove - target: '/etc/init.d/crond' - @execute - command: 'systemctl daemon-reload;systemctl reset-failed' - @service.init - source: "#{__dirname}/crond.hbs" - name: 'crond' - @fs.assert '/etc/init.d/crond' - @service.start - name: 'crond' - @service.start - name: 'stop' +describe 'service.init.systemctl', -> + return unless test.tags.service_systemctl + + they 'with systemctl systemd script', ({ssh, sudo}) -> + nikita + $ssh: ssh + $if_os: name: ['redhat','centos'], version: '7' + $sudo: sudo + $tmpdir: true + , ({metadata: {tmpdir}}) -> + await @service.remove 'cronie' + await @service.install 'cronie' + await @fs.remove + target: '/etc/init.d/crond' + await 
@fs.remove + target: '/usr/lib/systemd/system/crond.service' + await @file + content: ''' + [Unit] + Description={{description}} + After=auditd.service systemd-user-sessions.service time-sync.target + + [Service] + EnvironmentFile=/etc/sysconfig/crond + ExecStart=/usr/sbin/crond -n $CRONDARGS + ExecReload=/bin/kill -HUP $MAINPID + KillMode=process - they 'with systemctl systemd script', ({ssh}) -> - nikita - $ssh: ssh - $if_os: name: ['redhat','centos'], version: '7' - , -> - @service.remove 'cronie' - @service.install 'cronie' - @fs.remove - target: '/etc/init.d/crond' - @fs.remove - target: '/usr/lib/systemd/system/crond.service' - {$status} = await @service.init - source: "#{__dirname}/crond-systemd.hbs" - context: description: 'Command Scheduler Test 1' - target: '/usr/lib/systemd/system/crond.service' - $status.should.be.true() - @fs.assert '/usr/lib/systemd/system/crond.service' - @service.start - name: 'crond' + [Install] + WantedBy=multi-user.target + ''' + target: "#{tmpdir}/crond-systemd.hbs" + {$status} = await @service.init + source: "#{tmpdir}/crond-systemd.hbs" + context: description: 'Command Scheduler Test 1' + target: '/usr/lib/systemd/system/crond.service' + $status.should.be.true() + await @fs.assert '/usr/lib/systemd/system/crond.service' + await @service.start + name: 'crond' diff --git a/packages/service/test/install.arch.coffee b/packages/service/test/install.arch.coffee new file mode 100644 index 000000000..3acc4b33d --- /dev/null +++ b/packages/service/test/install.arch.coffee @@ -0,0 +1,39 @@ + +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) + +describe 'service.install.arch', -> + + return unless test.tags.service_install_arch + + they 'add pacman options', ({ssh, sudo}) -> + message = null + nikita + $ssh: ssh + $sudo: sudo + , ({tools: {events}}) -> + events.on 'stdin', (log) -> message = log.message + await @service.remove + name: test.service.name + await @service.install + name: test.service.name + pacman_flags: ['u', 'y'] + await @call -> + message.should.containEql "pacman --noconfirm -S #{test.service.name} -u -y" + + they 'add yay options', ({ssh, sudo}) -> + message = null + nikita + $ssh: ssh + $sudo: sudo + , ({tools: {events}}) -> + events.on 'stdin', (log) -> message = log.message + await @service.remove + name: test.service.name + await @service.install + name: test.service.name + yay_flags: ['u', 'y'] + await @call -> + message.should.containEql "yay --noconfirm -S #{test.service.name} -u -y" diff --git a/packages/service/test/install.coffee b/packages/service/test/install.coffee index 07bdb10ac..91325ab0e 100644 --- a/packages/service/test/install.coffee +++ b/packages/service/test/install.coffee @@ -1,23 +1,21 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_install +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.install', -> - - @timeout 50000 + return unless test.tags.service_install they 'new package', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name + name: test.service.name $status.should.be.true() they 'already installed packages', ({ssh, sudo}) -> @@ -25,12 +23,12 @@ describe 'service.install', -> $ssh: 
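The rewritten init tests render the systemd unit from a template written at test time instead of a checked-in `.hbs` fixture; once that source file exists, the `service.init` call itself keeps its shape. A sketch using the same paths and description as the test:

```js
// Inside a session with `$tmpdir: true`, metadata.tmpdir provides the
// scratch directory where the template source was previously rendered.
await nikita({ $tmpdir: true }, async function ({ metadata: { tmpdir } }) {
  const { $status } = await this.service.init({
    source: `${tmpdir}/crond-systemd.hbs`,
    context: { description: "Command Scheduler Test 1" },
    target: "/usr/lib/systemd/system/crond.service",
  });
  console.info("Unit file written or updated:", $status);
});
```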
ssh $sudo: sudo , -> - @service.remove - name: service.name - @service - name: service.name + await @service.remove + name: test.service.name + await @service + name: test.service.name {$status} = await @service - name: service.name + name: test.service.name $status.should.be.false() they 'name as default argument', ({ssh, sudo}) -> @@ -38,25 +36,28 @@ describe 'service.install', -> $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - {$status} = await @service service.name + await @service.remove + name: test.service.name + {$status} = await @service test.service.name $status.should.be.true() - they 'cache', ({ssh}) -> + they.skip 'update package list in cache', ({ssh, sudo}) -> + # Cache is not yet implemented, it needs some reflexion on how to make this + # work accross multiple ssh connections nikita $ssh: ssh + $sudo: sudo , -> - @service.remove - name: service.name - @call ({parent: {state}}) -> - (state['nikita:execute:installed'] is undefined).should.be.true() + await @service.remove + name: test.service.name + await @call ({parent: {state}}) -> + (state['nikita:service:packages:installed'] is undefined).should.be.true() {$status} = await @service - name: service.name + name: test.service.name cache: true $status.should.be.true() - @call ({parent: {state}}) -> - state['nikita:execute:installed'].should.containEql service.name + await @call ({parent: {state}}) -> + state['nikita:service:packages:installed'].should.containEql test.service.name they 'throw error if not exists', ({ssh, sudo}) -> nikita.service.install @@ -68,7 +69,7 @@ describe 'service.install', -> message: [ 'NIKITA_SERVICE_INSTALL:' 'failed to install package,' - 'name is `thisservicedoesnotexist`' + 'name is "thisservicedoesnotexist"' ].join ' ' they 'option `code`', ({ssh, sudo}) -> @@ -80,37 +81,3 @@ describe 'service.install', -> name: 'thisservicedoesnotexist' code: [0, [1, 100]] # 1 for RHEL, 100 for Ubuntu $status.should.be.false() - -describe 'service.install arch', -> - - return unless tags.service_install_arch - - they 'add pacman options', ({ssh, sudo}) -> - message = null - nikita - $ssh: ssh - $sudo: sudo - , ({tools: {events}}) -> - events.on 'stdin', (log) -> message = log.message - @service.remove - name: service.name - @service.install - name: service.name - pacman_flags: ['u', 'y'] - @call -> - message.should.containEql "pacman --noconfirm -S #{service.name} -u -y" - - they 'add yay options', ({ssh, sudo}) -> - message = null - nikita - $ssh: ssh - $sudo: sudo - , ({tools: {events}}) -> - events.on 'stdin', (log) -> message = log.message - @service.remove - name: service.name - @service.install - name: service.name - yay_flags: ['u', 'y'] - @call -> - message.should.containEql "yay --noconfirm -S #{service.name} -u -y" diff --git a/packages/service/test/installed.coffee b/packages/service/test/installed.coffee new file mode 100644 index 000000000..9538043cd --- /dev/null +++ b/packages/service/test/installed.coffee @@ -0,0 +1,58 @@ + +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) + +describe 'service.installed', -> + return unless test.tags.service_install + + they 'list all packages', ({ssh}) -> + {packages, installed} = await nikita + $ssh: ssh + .service.installed + cache: true + should(installed).be.undefined() + packages.should.matchEach (it) -> it.should.be.a.String() + + they 'test installed package', ({ssh, sudo}) -> + nikita + $ssh: ssh + $sudo: sudo + .service.remove + name: 
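The `code` option exercised just above lets callers whitelist package-manager exit codes, so a missing package downgrades from an error to a negative status. A sketch mirroring the test:

```js
// Exit codes 1 (RHEL) and 100 (Ubuntu) are accepted as "not installed"
// rather than thrown, so $status simply comes back false.
const { $status } = await nikita.service.install({
  name: "thisservicedoesnotexist",
  code: [0, [1, 100]],
});
console.info("Installed:", $status); // false
```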
test.service.name + .service.install + name: test.service.name + .service.installed + name: test.service.name + .call ({sibling: {output: {packages, installed}}}) -> + should(packages).be.undefined() + installed.should.be.true() + .service.remove + name: test.service.name + + they 'test not-installed package', ({ssh}) -> + {packages, installed} = await nikita + $ssh: ssh + .service.installed + name: 'XXXX' + cache: true + should(packages).be.undefined() + installed.should.be.false() + + they 'cache package list', ({ssh, sudo}) -> + nikita + $ssh: ssh + $sudo: sudo + , -> + await @service.remove + name: test.service.name + await @call ({parent: {state}}) -> + should(state['nikita:service:packages:installed']).be.undefined() + {$status} = await @service.installed + name: test.service.name + cache: true + $status.should.be.false() + await @call ({parent: {state}}) -> + state['nikita:service:packages:installed'].should.be.an.Array() + state['nikita:service:packages:installed'].should.not.containEql test.service.name diff --git a/packages/service/test/outdated.coffee b/packages/service/test/outdated.coffee new file mode 100644 index 000000000..a37497184 --- /dev/null +++ b/packages/service/test/outdated.coffee @@ -0,0 +1,50 @@ + +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) + +describe 'service.outdated', -> + return unless test.tags.service_outdated + + they 'list all packages', ({ssh, sudo}) -> + {packages, outdated} = await nikita + $ssh: ssh + $sudo: sudo + .service.outdated + cache: true + should(outdated).be.undefined() + packages.should.matchEach (it) -> it.should.be.a.String() + + they 'test outdated package', ({ssh, sudo}) -> + {packages, outdated} = await nikita + $ssh: ssh + $sudo: sudo + .service.outdated + name: test.service.name + should(packages).be.undefined() + outdated.should.be.true() + + they 'test not-installed package', ({ssh, sudo}) -> + {packages, outdated} = await nikita + $ssh: ssh + $sudo: sudo + .service.outdated + name: 'XXXX' + should(packages).be.undefined() + outdated.should.be.false() + + they 'cache package list', ({ssh, sudo}) -> + nikita + $ssh: ssh + $sudo: sudo + , -> + await @call ({parent: {state}}) -> + should(state['nikita:service:packages:outdated']).be.undefined() + {$status} = await @service.outdated + name: test.service.name + cache: true + $status.should.be.false() + await @call ({parent: {state}}) -> + state['nikita:service:packages:outdated'].should.be.an.Array() + \ No newline at end of file diff --git a/packages/service/test/remove.coffee b/packages/service/test/remove.coffee index 8b554d651..d3e31d3b4 100644 --- a/packages/service/test/remove.coffee +++ b/packages/service/test/remove.coffee @@ -1,24 +1,31 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_install +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.remove', -> - - @timeout 20000 + return unless test.tags.service_install + + they 'package is not installed', ({ssh, sudo}) -> + {$status} = await nikita + $ssh: ssh + $sudo: sudo + .service.remove + name: 'XXXX' + $status.should.be.false() + they 'new package', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @service.install - name: service.name + await @service.install + name: test.service.name {$status} = await @service.remove - 
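Like `outdated`, the new `installed` action has two result shapes depending on whether a name is passed. A short sketch, the package name being illustrative:

```js
// With a name: a boolean `installed` flag. Without one: the full `packages`
// list, optionally cached in the session state when `cache` is true.
const { installed } = await nikita.service.installed({ name: "openssl" });
const { packages } = await nikita.service.installed({ cache: true });
console.info(installed, packages.length);
```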
name: service.name + name: test.service.name $status.should.be.true() {$status} = await @service.remove - name: service.name + name: test.service.name $status.should.be.false() diff --git a/packages/service/test/restart.coffee b/packages/service/test/restart.coffee index 5b4978c71..762aad64b 100644 --- a/packages/service/test/restart.coffee +++ b/packages/service/test/restart.coffee @@ -1,22 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_systemctl +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.restart', -> - - @timeout 20000 + return unless test.tags.service_systemctl they 'should restart', ({ssh}) -> nikita $ssh: ssh , -> - @service - name: service.name - @service.start - name: service.srv_name + await @service + name: test.service.name + await @service.start + name: test.service.srv_name {$status} = await @service.restart - name: service.srv_name + name: test.service.srv_name $status.should.be.true() diff --git a/packages/service/test/start.coffee b/packages/service/test/start.coffee index 7e6405460..7b113c22b 100644 --- a/packages/service/test/start.coffee +++ b/packages/service/test/start.coffee @@ -1,30 +1,28 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_systemctl +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.start', -> - - @timeout 20000 + return unless test.tags.service_systemctl they 'should start', ({ssh}) -> nikita $ssh: ssh , -> - @service - name: service.name - @service.stop - name: service.srv_name + await @service + name: test.service.name + await @service.stop + name: test.service.srv_name {$status} = await @service.start - name: service.srv_name - $status.should.be.true() - {$status} = await @service.status - name: service.srv_name + name: test.service.srv_name $status.should.be.true() + {started} = await @service.status + name: test.service.srv_name + started.should.be.true() {$status} = await @service.start # Detect already started - name: service.srv_name + name: test.service.srv_name $status.should.be.false() they 'no error when invalid service name', ({ssh}) -> diff --git a/packages/service/test/startup.coffee b/packages/service/test/startup.coffee index 808233c76..1ba170d12 100644 --- a/packages/service/test/startup.coffee +++ b/packages/service/test/startup.coffee @@ -1,13 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_startup +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.startup', -> - - @timeout 30000 + return unless test.tags.service_startup describe 'startup', -> @@ -16,26 +14,26 @@ describe 'service.startup', -> $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name + await @service.remove + name: test.service.name {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: true $status.should.be.true() {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: 
test.service.chk_name startup: true $status.should.be.false() {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: false $status.should.be.true() {$status} = await @service - name: service.name - chk_name: service.chk_name + name: test.service.name + chk_name: test.service.chk_name startup: false $status.should.be.false() @@ -44,11 +42,11 @@ describe 'service.startup', -> $ssh: ssh $sudo: sudo , -> - @service.remove - name: service.name - @service.install service.name - @service.startup + await @service.remove + name: test.service.name + await @service.install test.service.name + await @service.startup startup: false - name: service.chk_name - {$status} = await @service.startup service.chk_name + name: test.service.chk_name + {$status} = await @service.startup test.service.chk_name $status.should.be.true() diff --git a/packages/service/test/status.coffee b/packages/service/test/status.coffee index d3243d34c..dcb19a5ea 100644 --- a/packages/service/test/status.coffee +++ b/packages/service/test/status.coffee @@ -1,33 +1,31 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_systemctl +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.status', -> - - @timeout 20000 + return unless test.tags.service_systemctl they 'store status', ({ssh}) -> nikita $ssh: ssh , -> - @service - name: service.name - @service.stop - name: service.srv_name - {$status} = await @service.status - name: service.srv_name - $status.should.be.false() - @service.start - name: service.srv_name - {$status} = await @service.status - name: service.srv_name - $status.should.be.true() - @service.stop - name: service.srv_name - {$status} = await @service.status - name: service.name - srv_name: service.srv_name - $status.should.be.false() + await @service + name: test.service.name + await @service.stop + name: test.service.srv_name + {started} = await @service.status + name: test.service.srv_name + started.should.be.false() + await @service.start + name: test.service.srv_name + {started} = await @service.status + name: test.service.srv_name + started.should.be.true() + await @service.stop + name: test.service.srv_name + {started} = await @service.status + name: test.service.name + srv_name: test.service.srv_name + started.should.be.false() diff --git a/packages/service/test/stop.coffee b/packages/service/test/stop.coffee index fea0cc324..e874f1fca 100644 --- a/packages/service/test/stop.coffee +++ b/packages/service/test/stop.coffee @@ -1,23 +1,21 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, service} = require './test' -they = require('mocha-they')(config) - -return unless tags.service_systemctl +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'service.stop', -> - - @timeout 20000 + return unless test.tags.service_systemctl they 'should stop', ({ssh}) -> nikita $ssh: ssh , -> - @service.install service.name - @service.start service.srv_name - {$status} = await @service.stop service.srv_name + await @service.install test.service.name + await @service.start test.service.srv_name + {$status} = await @service.stop test.service.srv_name $status.should.be.true() - {$status} = await @service.stop service.srv_name + {$status} = await 
@service.stop test.service.srv_name $status.should.be.false() they 'no error when invalid service name', ({ssh}) -> diff --git a/packages/service/test/test.coffee b/packages/service/test/test.coffee index 43f457203..ccc602447 100644 --- a/packages/service/test/test.coffee +++ b/packages/service/test/test.coffee @@ -1,29 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile "#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config - -nikita = require '@nikitajs/core/lib' -they = require('mocha-they')(config.config) - -they 'cache to avoid timeout later', ({ssh, sudo}) -> - @timeout 50000 - nikita( - $ssh: ssh - $sudo: sudo - ).execute ''' - if command -v yum; then - yum update -y - yum check-update -q - fi - ''' +export default config.default diff --git a/packages/system/README.md b/packages/system/README.md index b74c08aa6..fd2b0a61b 100644 --- a/packages/system/README.md +++ b/packages/system/README.md @@ -2,3 +2,17 @@ # Nikita "system" package The "system" package provides Nikita actions for various system management operations. 
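The test bootstrap above trades `fs.existsSync` and `require` for `fs/promises`, top-level `await` and a dynamic `import()`. Its small `exists` helper, transcribed to plain JavaScript:

```js
import fs from "node:fs/promises";

// Resolve to a boolean instead of throwing, mirroring the old fs.existsSync.
const exists = async (path) => {
  try {
    await fs.access(path, fs.constants.F_OK);
    return true;
  } catch {
    return false;
  }
};
```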
+ +## Usage + +```js +import "@nikitajs/system/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.system.user({ + username: "gollum", + shell: "/bin/bash", + system: true, +}); +console.info("User was modified:", $status); +``` diff --git a/packages/system/env/cgroups-multipass/run.sh b/packages/system/env/cgroups-multipass/run.sh index 5fc7d13e4..53871b8a8 100755 --- a/packages/system/env/cgroups-multipass/run.sh +++ b/packages/system/env/cgroups-multipass/run.sh @@ -3,25 +3,27 @@ set -e cd `pwd`/`dirname ${BASH_SOURCE}` -multipass launch \ - --name nikita-system-cgroup \ - --cpus 2 \ - --memory 10G \ - --disk 20G \ - release:18.04 -NIKITA_HOME=`node -e "process.stdout.write(path.join(process.cwd(), '../../../..'))"` -# Fix DNS -multipass exec nikita-system-cgroup -- bash <> /etc/systemd/resolved.conf" systemctl restart systemd-resolved EOF -# Allow mounting directories -multipass exec nikita-system-cgroup -- sudo apt upgrade -y -multipass exec nikita-system-cgroup -- sudo snap install multipass-sshfs -multipass mount $NIKITA_HOME nikita-system-cgroup:/nikita -# Install Node.js -multipass exec nikita-system-cgroup bash <<'EOF' + # Allow mounting directories + multipass exec nikita-system-cgroup -- sudo apt upgrade -y + multipass exec nikita-system-cgroup -- sudo snap install multipass-sshfs + multipass mount $NIKITA_HOME nikita-system-cgroup:/nikita + # Install Node.js + multipass exec nikita-system-cgroup bash <<'EOF' if command -v node ; then exit 42; fi curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash # NVM is sourced from ~/.bashrc which is not loaded in non interactive mode @@ -29,8 +31,8 @@ echo '. $HOME/.nvm/nvm.sh' >> $HOME/.profile . $HOME/.profile nvm install 16 EOF -# Configure SSH -multipass exec nikita-system-cgroup bash <<'EOF' + # Configure SSH + multipass exec nikita-system-cgroup bash <<'EOF' mkdir -p $HOME/.ssh && chmod 700 $HOME/.ssh if [ ! -f $HOME/.ssh/id_ed25519 ]; then ssh-keygen -t ed25519 -f $HOME/.ssh/id_ed25519 -N '' @@ -38,12 +40,20 @@ cat $HOME/.ssh/id_ed25519.pub >> $HOME/.ssh/authorized_keys # sudo bash -c "cat $HOME/.ssh/id_ed25519.pub >> /root/.ssh/authorized_keys" fi EOF -# Install test dependencies -multipass exec nikita-system-cgroup bash < - await @lxc.exec - $header: 'Node.js' - container: config.container - command: ''' - if command -v node ; then exit 42; fi - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 20 - # NVM is sourced from ~/.bashrc which is not loaded in non interactive mode - echo '. /root/.nvm/nvm.sh' >> /root/.profile - ''' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'SSH keys' - container: config.container - command: """ - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! 
-f /root/.ssh/id_ed25519 ]; then - ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' - cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys - fi - """ - trap: true - await @lxc.exec - $header: 'Package' - container: config.container - # command: 'yum install -y libcgroup-tools' - command: 'apt update -y && apt install -y cgroup-tools' - await @lxc.exec - $header: 'cgroup configuration' - container: config.container - # Ubuntu specific, centos/7 didn't require it - command: 'cp -pr /usr/share/doc/cgroup-tools/examples/cgsnapshot_blacklist.conf /etc/cgsnapshot_blacklist.conf' -.catch (err) -> - console.error err diff --git a/packages/system/env/cgroups/index.js b/packages/system/env/cgroups/index.js new file mode 100644 index 000000000..d5d853564 --- /dev/null +++ b/packages/system/env/cgroups/index.js @@ -0,0 +1,82 @@ +import path from "node:path"; +import dedent from "dedent"; +import runner from "@nikitajs/lxd-runner"; +const __dirname = new URL(".", import.meta.url).pathname; + +runner({ + cwd: "/nikita/packages/system", + container: "nikita-system-cgroups", + logdir: path.resolve(__dirname, "./logs"), + cluster: { + containers: { + "nikita-system-cgroups": { + vm: true, + image: "images:ubuntu/20.04", + properties: { + "environment.NIKITA_TEST_MODULE": + "/nikita/packages/system/env/cgroups/test.coffee", + "raw.idmap": process.env["NIKITA_LXD_IN_VAGRANT"] + ? "both 1000 0" + : `both ${process.getuid()} 0`, + }, + disk: { + nikitadir: { + path: "/nikita", + source: + process.env["NIKITA_HOME"] || + path.join(__dirname, "../../../../"), + }, + }, + ssh: { + enabled: false, + }, + }, + }, + provision_container: async function ({ config }) { + await this.lxc.exec({ + $header: "Node.js", + container: config.container, + command: dedent` + if command -v node ; then exit 42; fi + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . ~/.bashrc + nvm install 20 + # NVM is sourced from ~/.bashrc which is not loaded in non interactive mode + echo '. /root/.nvm/nvm.sh' >> /root/.profile + `, + trap: true, + code: [0, 42], + }); + await this.lxc.exec({ + $header: "SSH keys", + container: config.container, + command: dedent` + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! 
-f /root/.ssh/id_ed25519 ]; then + ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' + cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys + fi + `, + trap: true, + }); + await this.lxc.exec({ + $header: "Package", + container: config.container, + // command: 'yum install -y libcgroup-tools' + command: "apt update -y && apt install -y cgroup-tools", + }); + return await this.lxc.exec({ + $header: "cgroup configuration", + container: config.container, + // Ubuntu specific, centos/7 didn't require it + command: dedent` + cp -rp \ + /usr/share/doc/cgroup-tools/examples/cgsnapshot_blacklist.conf \ + /etc/cgsnapshot_blacklist.conf + `, + }); + }, + }, +}).catch(function (err) { + return console.error(err); +}); diff --git a/packages/system/env/cgroups/test.coffee b/packages/system/env/cgroups/test.coffee index 27e2063ab..c3c86c61c 100644 --- a/packages/system/env/cgroups/test.coffee +++ b/packages/system/env/cgroups/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_cgroups: true config: [ diff --git a/packages/system/env/info_archlinux/Dockerfile b/packages/system/env/info_archlinux/Dockerfile index cfa517a81..7bc59fc63 100644 --- a/packages/system/env/info_archlinux/Dockerfile +++ b/packages/system/env/info_archlinux/Dockerfile @@ -1,30 +1,29 @@ - FROM archlinux:latest -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " -RUN pacman --noconfirm -Syu \ - && pacman --noconfirm -S procps grep which sed +RUN pacman --noconfirm -Syu && \ + pacman --noconfirm -S procps grep which sed # Install Node.js RUN pacman --noconfirm -S nodejs npm # Install SSH and sudo -RUN pacman --noconfirm -S openssh sudo \ - && /usr/bin/ssh-keygen -A +RUN pacman --noconfirm -S openssh sudo && \ + /usr/bin/ssh-keygen -A ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/system # Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d /home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/info_archlinux/docker-compose.yml b/packages/system/env/info_archlinux/docker-compose.yml index 97997c4ec..ea35e535e 100644 --- a/packages/system/env/info_archlinux/docker-compose.yml +++ b/packages/system/env/info_archlinux/docker-compose.yml @@ -2,10 +2,17 @@ services: nodejs: build: . 
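The runner conversion above also shows the recurring ESM substitutes for the removed CommonJS globals. In isolation:

```js
import path from "node:path";

// new URL(".", import.meta.url) points at the directory of the current
// module, replacing __dirname; path.resolve then builds paths from it.
const __dirname = new URL(".", import.meta.url).pathname;
const logdir = path.resolve(__dirname, "./logs");
console.info(logdir);
```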
- image: nikita_system_info_archlinux container_name: nikita_system_info_archlinux + environment: + NIKITA_TEST_MODULE: /nikita/packages/system/env/info_archlinux/test.coffee + image: nikita_system_info_archlinux + networks: + - nikita platform: linux/amd64 # Required on Apple M1 volumes: - ../../../../:/nikita - environment: - NIKITA_TEST_MODULE: /nikita/packages/system/env/info_archlinux/test.coffee + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/system/env/info_archlinux/test.coffee b/packages/system/env/info_archlinux/test.coffee index 5a5c36db1..c748cc556 100644 --- a/packages/system/env/info_archlinux/test.coffee +++ b/packages/system/env/info_archlinux/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_info_disks: true system_info_os: true diff --git a/packages/system/env/info_centos6/docker-compose.yml b/packages/system/env/info_centos6/docker-compose.yml index 2a16d3eb2..8c0accc0d 100644 --- a/packages/system/env/info_centos6/docker-compose.yml +++ b/packages/system/env/info_centos6/docker-compose.yml @@ -1,22 +1,30 @@ services: - target: - build: - context: . - dockerfile: ./target/Dockerfile - image: nikita_system_info_centos6_target - container_name: nikita_system_info_centos6_target - platform: linux/amd64 # Required on Apple M1 nodejs: build: context: . dockerfile: ./nodejs/Dockerfile - image: nikita_system_info_centos6_nodejs container_name: nikita_system_info_centos6_nodejs - platform: linux/amd64 # Required on Apple M1 depends_on: - target - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/system/env/info_centos6/test.coffee + image: nikita_system_info_centos6_nodejs + networks: + - nikita + volumes: + - ../../../../:/nikita + target: + build: + context: . 
+ dockerfile: ./target/Dockerfile + container_name: nikita_system_info_centos6_target + image: nikita_system_info_centos6_target + networks: + - nikita + platform: linux/amd64 # Required on Apple M1 + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/system/env/info_centos6/nodejs/Dockerfile b/packages/system/env/info_centos6/nodejs/Dockerfile index e2acdf1dd..3752acfe3 100644 --- a/packages/system/env/info_centos6/nodejs/Dockerfile +++ b/packages/system/env/info_centos6/nodejs/Dockerfile @@ -1,27 +1,27 @@ -FROM centos:7.9.2009 -MAINTAINER David Worms +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " +ARG DEBIAN_FRONTEND=nonintercative RUN \ - # Install Node dependencies - yum install -y git make - -RUN yum clean all + apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/system -# Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita +# User +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita USER nikita # Install Node.js -# Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +# Note, bashrc not sourced unless running interactively ENV PATH /home/nikita/n/bin:$PATH ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/info_centos6/target/Dockerfile b/packages/system/env/info_centos6/target/Dockerfile index 35ef39ca8..f4eb8431a 100644 --- a/packages/system/env/info_centos6/target/Dockerfile +++ b/packages/system/env/info_centos6/target/Dockerfile @@ -1,25 +1,26 @@ FROM centos:6 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " # Fix yum repo error, # see - https://github.com/adaltas/node-nikita/issues/184 RUN sed -i -e 's/mirrorlist/#mirrorlist/g' \ - -e 's/#baseurl/baseurl/g' \ - -e 's/http:\/\/mirror.centos.org\/centos\/$releasever/https:\/\/vault.centos.org\/6.10/g' \ - /etc/yum.repos.d/CentOS-Base.repo \ - && yum clean all \ - && yum -y update + -e 's/#baseurl/baseurl/g' \ + -e 's/http:\/\/mirror.centos.org\/centos\/$releasever/https:\/\/vault.centos.org\/6.10/g' \ + /etc/yum.repos.d/CentOS-Base.repo && \ + yum clean all +# Note, nov 2023, `yum update` fail to update gcc, commenting for now +# yum -y update # Install SSH #RUN \ # # Install SSH and sudo # yum install -y openssh-server openssh-clients sudo && \ # ssh-keygen -A -RUN yum install -y openssh-server openssh-clients sudo \ - # Avoid `Could not load host key: /etc/ssh/ssh_host_rsa_key` - && ssh-keygen -f /etc/ssh/ssh_host_rsa_key \ - # Avoid `Could not load host key: /etc/ssh/ssh_host_dsa_key` - && cp -rp /etc/ssh/ssh_host_rsa_key /etc/ssh/ssh_host_dsa_key +RUN yum install -y openssh-server openssh-clients sudo && \ + # Avoid `Could not load host key: /etc/ssh/ssh_host_rsa_key` + ssh-keygen -f /etc/ssh/ssh_host_rsa_key && \ + # Avoid `Could not load host key: /etc/ssh/ssh_host_dsa_key` + cp -rp /etc/ssh/ssh_host_rsa_key /etc/ssh/ssh_host_dsa_key RUN yum clean all @@ -27,16 +28,15 @@ ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita # Sudo User -RUN useradd nikita -d /home/nikita \ - && hash=$(echo "secret" | openssl passwd -1 -stdin) \ - && usermod --pass="$hash" nikita \ - && mkdir -p 
/home/nikita \ - && mkdir -p /home/nikita/.ssh \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d /home/nikita && \ + hash=$(echo "secret" | openssl passwd -1 -stdin) && \ + usermod --pass="$hash" nikita && \ + mkdir -p /home/nikita && \ + mkdir -p /home/nikita/.ssh && \ + chown -R nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita -RUN chown -R nikita /home/nikita/.ssh USER nikita ENTRYPOINT ["sudo", "/usr/sbin/sshd", "-D"] diff --git a/packages/system/env/info_centos6/test.coffee b/packages/system/env/info_centos6/test.coffee index 7cd12bbac..6d90f50db 100644 --- a/packages/system/env/info_centos6/test.coffee +++ b/packages/system/env/info_centos6/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_info_disks: false system_info_os: true @@ -13,6 +13,5 @@ module.exports = label: 'remote' ssh: host: 'target', username: 'nikita', - # private_key_path: '~/.ssh/id_rsa' password: 'secret' ] diff --git a/packages/system/env/info_centos7/Dockerfile b/packages/system/env/info_centos7/Dockerfile index e521a544c..e4e7ee120 100644 --- a/packages/system/env/info_centos7/Dockerfile +++ b/packages/system/env/info_centos7/Dockerfile @@ -1,6 +1,5 @@ - FROM centos:7.9.2009 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ # Install Node dependencies @@ -16,11 +15,11 @@ RUN mkdir -p /nikita WORKDIR /nikita/packages/system # Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d /home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js @@ -30,6 +29,6 @@ RUN \ ENV PATH /home/nikita/n/bin:$PATH RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/info_centos7/docker-compose.yml b/packages/system/env/info_centos7/docker-compose.yml index f3cddd300..409070beb 100644 --- a/packages/system/env/info_centos7/docker-compose.yml +++ b/packages/system/env/info_centos7/docker-compose.yml @@ -1,10 +1,30 @@ services: nodejs: - build: . - image: nikita_system_info_centos7 - container_name: nikita_system_info_centos7 - volumes: - - ../../../../:/nikita + build: + context: . + dockerfile: ./nodejs/Dockerfile + container_name: nikita_system_info_centos7_nodejs + depends_on: + - target environment: NIKITA_TEST_MODULE: /nikita/packages/system/env/info_centos7/test.coffee + image: nikita_system_info_centos7_nodejs + networks: + - nikita + volumes: + - ../../../../:/nikita + target: + build: + context: . 
+ dockerfile: ./target/Dockerfile + container_name: nikita_system_info_centos7_target + image: nikita_system_info_centos7_target + networks: + - nikita + # platform: linux/amd64 # Required on Apple M1 + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/system/env/info_centos7/entrypoint.sh b/packages/system/env/info_centos7/entrypoint.sh index f93f4fc84..d01ae4a9f 100755 --- a/packages/system/env/info_centos7/entrypoint.sh +++ b/packages/system/env/info_centos7/entrypoint.sh @@ -1,9 +1,6 @@ #!/bin/bash set -e -# Start ssh daemon -sudo /usr/sbin/sshd -# We have TTY, so probably an interactive container... if test -t 0; then # We have TTY, so probably an interactive container... if [[ $@ ]]; then diff --git a/packages/system/env/info_centos7/nodejs/Dockerfile b/packages/system/env/info_centos7/nodejs/Dockerfile new file mode 100644 index 000000000..3752acfe3 --- /dev/null +++ b/packages/system/env/info_centos7/nodejs/Dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " + +ARG DEBIAN_FRONTEND=nonintercative +RUN \ + apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping + +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/system + +# User +RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita +USER nikita + +# Install Node.js +ENV NODE_VERSION stable +RUN curl -L https://git.io/n-install | bash -s -- -y +# Note, bashrc not sourced unless running interactively +ENV PATH /home/nikita/n/bin:$PATH + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/info_centos7/target/Dockerfile b/packages/system/env/info_centos7/target/Dockerfile new file mode 100644 index 000000000..11cd9490d --- /dev/null +++ b/packages/system/env/info_centos7/target/Dockerfile @@ -0,0 +1,30 @@ +FROM centos:7.9.2009 +LABEL org.opencontainers.image.authors="David Worms " + +RUN \ + # Install Node dependencies + yum install -y git make && \ + # Install openssl for user password + yum install -y openssl \ + # Install SSH and sudo + yum install -y openssh-server openssh-clients sudo && \ + ssh-keygen -A + +RUN yum clean all + +# ADD ./entrypoint.sh /entrypoint.sh +# RUN mkdir -p /nikita + +# Sudo User +RUN useradd nikita -d /home/nikita && \ + hash=$(echo "secret" | openssl passwd -1 -stdin) && \ + usermod --pass="$hash" nikita && \ + mkdir -p /home/nikita && \ + mkdir -p /home/nikita/.ssh && \ + chown -R nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + +# USER nikita + +ENTRYPOINT ["sudo", "/usr/sbin/sshd", "-D"] diff --git a/packages/system/env/info_centos7/test.coffee b/packages/system/env/info_centos7/test.coffee index bf7edc172..9c3c430bb 100644 --- a/packages/system/env/info_centos7/test.coffee +++ b/packages/system/env/info_centos7/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_info_disks: true system_info_os: true @@ -10,10 +10,9 @@ module.exports = # linux_version: /5\.10\.\d+/ version: /7\.9\.\d+/ config: [ - label: 'local' - , label: 'remote' ssh: - host: '127.0.0.1', username: process.env.USER, - private_key_path: '~/.ssh/id_ed25519' + host: 'target' + username: 'nikita' + password: 'secret' ] diff --git a/packages/system/env/info_ubuntu/Dockerfile b/packages/system/env/info_ubuntu/Dockerfile index 0e51d29f4..8f078d180 100644 --- 
a/packages/system/env/info_ubuntu/Dockerfile +++ b/packages/system/env/info_ubuntu/Dockerfile @@ -1,16 +1,15 @@ - -FROM ubuntu:20.04 -MAINTAINER David Worms +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " ARG DEBIAN_FRONTEND=nonintercative RUN \ - apt update -y && \ - # Install Node.js dependencies - apt install -y build-essential curl git && \ - # Install SSH and sudo - apt-get install -y openssh-server sudo && \ - ssh-keygen -A && \ - mkdir -p /run/sshd + apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita @@ -18,10 +17,10 @@ WORKDIR /nikita/packages/system # Sudo User RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js @@ -31,7 +30,7 @@ RUN curl -L https://git.io/n-install | bash -s -- -y ENV PATH /home/nikita/n/bin:$PATH RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/info_ubuntu/docker-compose.yml b/packages/system/env/info_ubuntu/docker-compose.yml index 4031ebf5d..ee28e6824 100644 --- a/packages/system/env/info_ubuntu/docker-compose.yml +++ b/packages/system/env/info_ubuntu/docker-compose.yml @@ -2,11 +2,18 @@ services: nodejs: build: . 
- image: nikita_system_info_ubuntu container_name: nikita_system_info_ubuntu - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/system/env/info_ubuntu/test.coffee DEBUG: 1 # DEBUG: 0 + image: nikita_system_info_ubuntu + networks: + - nikita + volumes: + - ../../../../:/nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/system/env/info_ubuntu/test.coffee b/packages/system/env/info_ubuntu/test.coffee index b7f3db809..c5c9ea889 100644 --- a/packages/system/env/info_ubuntu/test.coffee +++ b/packages/system/env/info_ubuntu/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_info_disks: true system_info_os: true @@ -8,7 +8,7 @@ module.exports = arch: /x86_64|aarch64/ distribution: 'ubuntu' # linux_version: /5\.10\.\d+/ - version: '20.04' + version: '22.04' config: [ label: 'local' , diff --git a/packages/system/env/limits/Dockerfile b/packages/system/env/limits/Dockerfile index 0e51d29f4..354194750 100644 --- a/packages/system/env/limits/Dockerfile +++ b/packages/system/env/limits/Dockerfile @@ -1,16 +1,14 @@ - -FROM ubuntu:20.04 -MAINTAINER David Worms +FROM ubuntu:22.04 +LABEL org.opencontainers.image.authors="David Worms " ARG DEBIAN_FRONTEND=nonintercative -RUN \ - apt update -y && \ - # Install Node.js dependencies - apt install -y build-essential curl git && \ - # Install SSH and sudo - apt-get install -y openssh-server sudo && \ - ssh-keygen -A && \ - mkdir -p /run/sshd +RUN apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita @@ -18,10 +16,10 @@ WORKDIR /nikita/packages/system # Sudo User RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js @@ -30,8 +28,7 @@ RUN curl -L https://git.io/n-install | bash -s -- -y # Note, bashrc not sourced unless running interactively ENV PATH /home/nikita/n/bin:$PATH -RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/limits/test.coffee b/packages/system/env/limits/test.coffee index a80ba584a..bc2bfa0ff 100644 --- a/packages/system/env/limits/test.coffee +++ b/packages/system/env/limits/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_limits: true config: [ diff --git a/packages/system/env/run.sh b/packages/system/env/run.sh index 41a10341c..b0876d453 100755 --- a/packages/system/env/run.sh +++ b/packages/system/env/run.sh @@ -5,9 +5,9 @@ cd `pwd`/`dirname ${BASH_SOURCE}` # Require cgroup v1 if command -v multipass; then - ./cgroup-multipass/run.sh + ./cgroups-multipass/run.sh else - npx coffee ./env/cgroups/index.coffee run + node ./cgroups/index.js run fi ./info_archlinux/run.sh ./info_centos6/run.sh diff --git a/packages/system/env/tmpfs/Dockerfile b/packages/system/env/tmpfs/Dockerfile index ff3d48d42..9305fb029 100644 --- 
a/packages/system/env/tmpfs/Dockerfile +++ b/packages/system/env/tmpfs/Dockerfile @@ -1,35 +1,34 @@ FROM centos:7.9.2009 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ - # Install Node dependencies - yum install -y git make \ - # Install SSH and sudo - && yum install -y openssh-server openssh-clients sudo \ - && ssh-keygen -A + # Install Node dependencies + yum install -y git make && \ + # Install SSH and sudo + yum install -y openssh-server openssh-clients sudo && \ + ssh-keygen -A ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita WORKDIR /nikita/packages/system # Sudo User -RUN useradd nikita -d /home/nikita \ - && mkdir -p /home/nikita \ - && chown nikita /home/nikita \ - && chmod 700 /home/nikita \ - && echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +RUN useradd nikita -d /home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js ENV NODE_VERSION stable # Note, CentOS 7.9.2009 incompatible with Node.js >= 18 -RUN \ - curl -L https://git.io/n-install | bash -s -- -y 16.19 +RUN curl -L https://git.io/n-install | bash -s -- -y 16.19 # Note, bashrc not sourced unless running interactively # RUN . ~/.bashrc && n $NODE_VERSION ENV PATH /home/nikita/n/bin:$PATH -RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/tmpfs/docker-compose.yml b/packages/system/env/tmpfs/docker-compose.yml index 81cf5b017..49f50706e 100644 --- a/packages/system/env/tmpfs/docker-compose.yml +++ b/packages/system/env/tmpfs/docker-compose.yml @@ -2,9 +2,16 @@ services: nodejs: build: . 
- image: nikita_system_tmpfs container_name: nikita_system_tmpfs - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/system/env/tmpfs/test.coffee + image: nikita_system_tmpfs + networks: + - nikita + volumes: + - ../../../../:/nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/system/env/tmpfs/test.coffee b/packages/system/env/tmpfs/test.coffee index 491596bac..8846882c3 100644 --- a/packages/system/env/tmpfs/test.coffee +++ b/packages/system/env/tmpfs/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_tmpfs: true config: [ diff --git a/packages/system/env/user/Dockerfile b/packages/system/env/user/Dockerfile index fcdf5fbc4..25c99f781 100644 --- a/packages/system/env/user/Dockerfile +++ b/packages/system/env/user/Dockerfile @@ -1,15 +1,14 @@ FROM ubuntu:20.04 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " ARG DEBIAN_FRONTEND=nonintercative -RUN \ - apt update -y && \ - # Install Node.js dependencies - apt install -y build-essential curl git && \ - # Install SSH and sudo - apt-get install -y openssh-server sudo && \ - ssh-keygen -A && \ - mkdir -p /run/sshd +RUN apt update -y && \ + # Install Node.js dependencies + apt install -y build-essential curl git iputils-ping && \ + # Install SSH and sudo + apt-get install -y openssh-server sudo && \ + ssh-keygen -A && \ + mkdir -p /run/sshd ADD ./entrypoint.sh /entrypoint.sh RUN mkdir -p /nikita @@ -17,10 +16,10 @@ WORKDIR /nikita/packages/system # Sudo User RUN useradd nikita -d /home/nikita -m -s /bin/bash && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js @@ -29,6 +28,6 @@ RUN curl -L https://git.io/n-install | bash -s -- -y ENV PATH /home/nikita/n/bin:$PATH RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/system/env/user/docker-compose.yml b/packages/system/env/user/docker-compose.yml index 84731ab14..af3f8df7e 100644 --- a/packages/system/env/user/docker-compose.yml +++ b/packages/system/env/user/docker-compose.yml @@ -2,9 +2,16 @@ services: nodejs: build: . 
- image: nikita_system_user container_name: nikita_system_user - volumes: - - ../../../../:/nikita environment: NIKITA_TEST_MODULE: /nikita/packages/system/env/user/test.coffee + image: nikita_system_user + networks: + - nikita + volumes: + - ../../../../:/nikita + +networks: + nikita: + name: nikita + driver: bridge diff --git a/packages/system/env/user/test.coffee b/packages/system/env/user/test.coffee index 5fced5899..70677bad4 100644 --- a/packages/system/env/user/test.coffee +++ b/packages/system/env/user/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: system_user: true system_group: true diff --git a/packages/system/lib/cgroups/index.js b/packages/system/lib/cgroups/index.js index 562bf5ae5..09b405439 100644 --- a/packages/system/lib/cgroups/index.js +++ b/packages/system/lib/cgroups/index.js @@ -1,12 +1,12 @@ // Dependencies -const path = require('path'); -const {merge} = require('mixme'); -const utils = require('../utils'); -const definitions = require('./schema.json'); +import path from 'node:path' +import {merge} from 'mixme'; +import utils from "@nikitajs/system/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { config.mounts ??= [] config.groups ??= {}; diff --git a/packages/system/lib/group/index.js b/packages/system/lib/group/index.js index c861ba470..748784bf9 100644 --- a/packages/system/lib/group/index.js +++ b/packages/system/lib/group/index.js @@ -1,11 +1,10 @@ // Dependencies -const definitions = require('./schema.json'); -const utils = require('../utils'); -const esa = utils.string.escapeshellarg; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // throw Error 'Invalid gid option' if config.gid? and isNaN config.gid const { groups } = await this.system.group.read(); diff --git a/packages/system/lib/group/read/index.js b/packages/system/lib/group/read/index.js index a43b450d6..d35da5cb1 100644 --- a/packages/system/lib/group/read/index.js +++ b/packages/system/lib/group/read/index.js @@ -1,7 +1,7 @@ // Dependencies -const definitions = require('./schema.json'); -const utils = require('../../utils'); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/system/utils"; // Parse the groups output const str2groups = function (data) { @@ -22,7 +22,7 @@ const str2groups = function (data) { }; // Action -module.exports = { +export default { handler: async function ({ config }) { if (typeof config.gid === "string" && /\d+/.test(config.gid)) { config.gid = parseInt(config.gid, 10); diff --git a/packages/system/lib/group/read/schema.json b/packages/system/lib/group/read/schema.json index 2cb06a51e..f8e33a01f 100644 --- a/packages/system/lib/group/read/schema.json +++ b/packages/system/lib/group/read/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid", + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid", "description": "Retrieve the information for a specific group name or gid." 
}, "target": { diff --git a/packages/system/lib/group/remove/index.js b/packages/system/lib/group/remove/index.js index dd2897eab..6c3407e77 100644 --- a/packages/system/lib/group/remove/index.js +++ b/packages/system/lib/group/remove/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: function({metadata, config}) { this.execute({ command: `groupdel ${config.name}`, diff --git a/packages/system/lib/info/disks/index.js b/packages/system/lib/info/disks/index.js index 7fcf1dc22..4b9caa8b3 100644 --- a/packages/system/lib/info/disks/index.js +++ b/packages/system/lib/info/disks/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require("../../utils"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/system/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { const { stdout } = await this.execute({ command: `df --output='${config.output.join(",")}'`, diff --git a/packages/system/lib/info/os/index.js b/packages/system/lib/info/os/index.js index f2afb7987..deb284671 100644 --- a/packages/system/lib/info/os/index.js +++ b/packages/system/lib/info/os/index.js @@ -1,9 +1,9 @@ // Dependencies -const utils = require("../../utils"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/system/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function () { // Using `utils.os.command` to be consistant with OS conditions plugin in core const { stdout } = await this.execute(utils.os.command); diff --git a/packages/system/lib/limits/index.js b/packages/system/lib/limits/index.js index 5a42dbe75..de971ca86 100644 --- a/packages/system/lib/limits/index.js +++ b/packages/system/lib/limits/index.js @@ -1,9 +1,9 @@ // Dependencies -const { regexp } = require("../utils"); -const definitions = require("./schema.json"); +import regexp from "@nikitajs/core/utils/regexp"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { if (config.system && config.user) { throw Error( diff --git a/packages/system/lib/mod/index.js b/packages/system/lib/mod/index.js index df4c8d1ae..8362ad3c8 100644 --- a/packages/system/lib/mod/index.js +++ b/packages/system/lib/mod/index.js @@ -1,12 +1,12 @@ // Dependencies -const path = require('path'); -const quote = require('regexp-quote'); -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import path from 'node:path' +import quote from "regexp-quote"; +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({metadata, config}) { for (const module in config.modules) { const active = config.modules[module]; diff --git a/packages/system/lib/register.js b/packages/system/lib/register.js index 0b4f07fcd..e012213af 100644 --- a/packages/system/lib/register.js +++ b/packages/system/lib/register.js @@ -1,43 +1,31 @@ -// Generated by CoffeeScript 2.7.0 -// Registration of `nikita.system` actions -var registry; -require('@nikitajs/file/lib/register'); +// Dependencies +import '@nikitajs/file/register'; +import registry from '@nikitajs/core/registry'; -registry 
= require('@nikitajs/core/lib/registry'); - -module.exports = { +const actions = { system: { - cgroups: '@nikitajs/system/lib/cgroups', + cgroups: '@nikitajs/system/cgroups', group: { - '': '@nikitajs/system/lib/group', - read: '@nikitajs/system/lib/group/read', - remove: '@nikitajs/system/lib/group/remove' + '': '@nikitajs/system/group', + read: '@nikitajs/system/group/read', + remove: '@nikitajs/system/group/remove' }, info: { - disks: '@nikitajs/system/lib/info/disks', - os: '@nikitajs/system/lib/info/os' + disks: '@nikitajs/system/info/disks', + os: '@nikitajs/system/info/os' }, - limits: '@nikitajs/system/lib/limits', - mod: '@nikitajs/system/lib/mod', - running: '@nikitajs/system/lib/running', - tmpfs: '@nikitajs/system/lib/tmpfs', - uid_gid: '@nikitajs/system/lib/uid_gid', + limits: '@nikitajs/system/limits', + mod: '@nikitajs/system/mod', + running: '@nikitajs/system/running', + tmpfs: '@nikitajs/system/tmpfs', + uid_gid: '@nikitajs/system/uid_gid', user: { - '': '@nikitajs/system/lib/user', - read: '@nikitajs/system/lib/user/read', - remove: '@nikitajs/system/lib/user/remove' + '': '@nikitajs/system/user', + read: '@nikitajs/system/user/read', + remove: '@nikitajs/system/user/remove' } } }; -(async function() { - var err; - try { - return (await registry.register(module.exports)); - } catch (error) { - err = error; - console.error(err.stack); - return process.exit(1); - } -})(); +await registry.register(actions) diff --git a/packages/system/lib/running/index.js b/packages/system/lib/running/index.js index c776851e0..3adb9d003 100644 --- a/packages/system/lib/running/index.js +++ b/packages/system/lib/running/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} diff --git a/packages/system/lib/tmpfs/index.js b/packages/system/lib/tmpfs/index.js index bd345f7fe..736fc7a6e 100644 --- a/packages/system/lib/tmpfs/index.js +++ b/packages/system/lib/tmpfs/index.js @@ -1,19 +1,23 @@ // Dependencies -const {merge} = require('mixme'); -const utils = require('../utils'); -const definitions = require('./schema.json'); +import {merge} from 'mixme'; +import utils from "@nikitajs/system/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // for now only support directory type path option - let content = {}; - content[config.mount] = {}; + let content = { + [config.mount]: { + type: "d", + }, + }; + // content[config.mount] = {}; for (const key of ["mount", "perm", "uid", "gid", "age", "argu"]) { content[config.mount][key] = config[key]; } - content[config.mount]["type"] = "d"; + // content[config.mount]["type"] = "d"; if (config.uid != null) { if (!/^[0-9]+/.exec(config.uid)) { if (config.name == null) { @@ -27,10 +31,7 @@ module.exports = { ? 
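The register.js rewrite above drops the CoffeeScript-generated async IIFE: ES modules support top-level await, so a failed registry.register() rejects the module load and Node.js reports the error itself, which makes the manual try/catch with process.exit(1) unnecessary. A trimmed sketch of the resulting structure (the action map is abbreviated here):

// Dependencies
import "@nikitajs/file/register";
import registry from "@nikitajs/core/registry";

// Map action names to the modules implementing them (abbreviated).
const actions = {
  system: {
    cgroups: "@nikitajs/system/cgroups",
    limits: "@nikitajs/system/limits",
    mod: "@nikitajs/system/mod",
  },
};

// Top-level await: a registration failure rejects this module itself.
await registry.register(actions);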
`/etc/tmpfiles.d/${config.name}.conf` : "/etc/tmpfiles.d/default.conf"; } - log({ - message: `target set to ${config.target}`, - level: "DEBUG", - }); + log("DEBUG", `target set to ${config.target}`); if (config.merge) { log("DEBUG", "opening target file for merge"); try { @@ -39,10 +40,7 @@ module.exports = { encoding: "utf8", }); content = merge(utils.tmpfs.parse(data), content); - log({ - message: "content has been merged", - level: "DEBUG", - }); + log("DEBUG", "content has been merged"); } catch (error) { if (error.code !== "NIKITA_FS_CRS_TARGET_ENOENT") { throw error; @@ -59,10 +57,7 @@ module.exports = { uid: config.uid, }); if ($status) { - log({ - message: `re-creating ${config.mount} tmpfs file`, - level: "INFO", - }); + log("INFO", `re-creating ${config.mount} tmpfs file`); await this.execute({ command: `systemd-tmpfiles --remove ${config.target}`, }); diff --git a/packages/system/lib/tmpfs/schema.json b/packages/system/lib/tmpfs/schema.json index 8e849b299..42cb93198 100644 --- a/packages/system/lib/tmpfs/schema.json +++ b/packages/system/lib/tmpfs/schema.json @@ -19,7 +19,7 @@ "description": "Create a backup, append a provided string to the filename extension or\na timestamp if value is not a string, only apply if the target file\nexists and is modified." }, "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid", + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid", "description": "File group name or group id." }, "merge": { @@ -32,7 +32,7 @@ "description": "The mount point dir to create on system startup." }, "mode": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chmod#/definitions/config/properties/mode", + "$ref": "module://@nikitajs/core/actions/fs/chmod#/definitions/config/properties/mode", "description": "Mode of the target configuration file" }, "name": { @@ -49,7 +49,7 @@ "description": "File path where to write content to. Defined to\n/etc/tmpfiles.d/{config.uid}.conf if uid is defined or\n/etc/tmpfiles.d/default.conf." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid", + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid", "description": "File user name or user id." } }, diff --git a/packages/system/lib/uid_gid/index.js b/packages/system/lib/uid_gid/index.js index d81019625..9c60d802a 100644 --- a/packages/system/lib/uid_gid/index.js +++ b/packages/system/lib/uid_gid/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config }) { if (typeof config.uid === "string") { const { user } = await this.system.user.read({ diff --git a/packages/system/lib/uid_gid/schema.json b/packages/system/lib/uid_gid/schema.json index 2b5194810..c3f24a654 100644 --- a/packages/system/lib/uid_gid/schema.json +++ b/packages/system/lib/uid_gid/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "gid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/gid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/gid" }, "group_target": { "type": "string", @@ -14,7 +14,7 @@ "description": "Path to the passwd definition file, default to \"/etc/passwd\"." 
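The tmpfs handler above also switches its logging calls from the object form log({ message, level }) to the positional shorthand log(level, message); both forms appear in the hunks, so the sketch below only restates them side by side inside a minimal handler:

export default {
  handler: async function ({ config, tools: { log } }) {
    // Before the rewrite (object form):
    // log({ message: `target set to ${config.target}`, level: "DEBUG" });

    // After the rewrite (positional shorthand):
    log("DEBUG", `target set to ${config.target}`);
  },
};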
}, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid" + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid" } } } diff --git a/packages/system/lib/user/index.js b/packages/system/lib/user/index.js index d1077f1f4..bf08139fa 100644 --- a/packages/system/lib/user/index.js +++ b/packages/system/lib/user/index.js @@ -1,12 +1,12 @@ // Dependencies -const path = require('path'); -const dedent = require('dedent'); -const utils = require("../utils"); -const definitions = require('./schema.json'); +import path from 'node:path' +import dedent from "dedent"; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} @@ -52,7 +52,7 @@ module.exports = { config.home && "-m", config.home && `-d ${config.home}`, config.shell && `-s ${config.shell}`, - config.comment && `-c ${utils.string.escapeshellarg(config.comment)}`, + config.comment && `-c ${esa(config.comment)}`, config.uid && `-u ${config.uid}`, config.gid && `-g ${config.gid}`, config.expiredate && `-e ${config.expiredate}`, @@ -95,7 +95,16 @@ module.exports = { try { await this.execute({ $if: changed.length, - command: ['usermod', config.home ? `-d ${config.home}` : void 0, config.shell ? `-s ${config.shell}` : void 0, config.comment != null ? `-c ${utils.string.escapeshellarg(config.comment)}` : void 0, config.gid ? `-g ${config.gid}` : void 0, config.groups ? `-G ${config.groups.join(',')}` : void 0, config.uid ? `-u ${config.uid}` : void 0, `${config.name}`].join(' ') + command: [ + "usermod", + config.home && `-d ${config.home}`, + config.shell && `-s ${config.shell}`, + config.comment && `-c ${esa(config.comment)}`, + config.gid && `-g ${config.gid}`, + config.groups && `-G ${config.groups.join(",")}`, + config.uid && `-u ${config.uid}`, + `${config.name}`, + ].filter(Boolean).join(" "), }); } catch (error) { if (error.exit_code === 8) { diff --git a/packages/system/lib/user/read/index.js b/packages/system/lib/user/read/index.js index eb00cb760..7744caeed 100644 --- a/packages/system/lib/user/read/index.js +++ b/packages/system/lib/user/read/index.js @@ -1,6 +1,6 @@ // Dependencies -const utils = require("../../utils"); -const definitions = require("./schema.json"); +import utils from "@nikitajs/system/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Parse the passwd output const str2passwd = function (data) { @@ -23,7 +23,7 @@ const str2passwd = function (data) { }; // Action -module.exports = { +export default { handler: async function ({ config }) { if (typeof config.uid === "string" && /\d+/.test(config.uid)) { config.uid = parseInt(config.uid, 10); diff --git a/packages/system/lib/user/read/schema.json b/packages/system/lib/user/read/schema.json index 3053a87ab..ffb94b5a1 100644 --- a/packages/system/lib/user/read/schema.json +++ b/packages/system/lib/user/read/schema.json @@ -7,7 +7,7 @@ "description": "Path to the passwd definition file, use the `getent passwd` command by\ndefault which use to \"/etc/passwd\"." }, "uid": { - "$ref": "module://@nikitajs/core/lib/actions/fs/chown#/definitions/config/properties/uid", + "$ref": "module://@nikitajs/core/actions/fs/chown#/definitions/config/properties/uid", "description": "Retrieve the information for a specific username or uid." 
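The usermod hunk above replaces a single line of chained ternaries with an array of conditional fragments filtered through Boolean, so unset options simply vanish from the generated command. A standalone illustration of the technique; the config values below are invented for the example:

import { escapeshellarg as esa } from "@nikitajs/core/utils/string";

// Invented configuration, only to show which fragments survive the filter.
const config = {
  name: "nikita",
  home: "/home/nikita",
  shell: "/bin/bash",
  comment: undefined, // unset: its fragment evaluates to undefined
  gid: 1010,
};

const command = [
  "usermod",
  config.home && `-d ${config.home}`,
  config.shell && `-s ${config.shell}`,
  config.comment && `-c ${esa(config.comment)}`,
  config.gid && `-g ${config.gid}`,
  `${config.name}`,
]
  .filter(Boolean) // drop the undefined/false fragments
  .join(" ");

console.log(command); // usermod -d /home/nikita -s /bin/bash -g 1010 nikita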
} } diff --git a/packages/system/lib/user/remove/index.js b/packages/system/lib/user/remove/index.js index cefc9cb1e..69fa02158 100644 --- a/packages/system/lib/user/remove/index.js +++ b/packages/system/lib/user/remove/index.js @@ -1,9 +1,9 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: function({config}) { this.execute({ command: `userdel ${config.name}`, diff --git a/packages/system/lib/utils/cgconfig.js b/packages/system/lib/utils/cgconfig.js index 01b7c4394..30a5b237b 100644 --- a/packages/system/lib/utils/cgconfig.js +++ b/packages/system/lib/utils/cgconfig.js @@ -1,9 +1,6 @@ -// Generated by CoffeeScript 2.7.0 -var utils; +import string from '@nikitajs/core/utils/string'; -utils = require('@nikitajs/core/lib/utils'); - -module.exports = { +export default { parse: function(str) { let list_of_mount_sections = []; let list_of_group_sections = {}; @@ -20,8 +17,7 @@ module.exports = { let current_group_section = null; // group section is a tree but only of group let current_controller_name = null; let current_group_section_perm_name = null; - utils.string.lines(str).forEach(function(line, _, __) { - var base, base1, match, match_admin, name, name1, name2, name4, sep, type, value; + string.lines(str).forEach(function(line, _, __) { if (!line || line.match(/^\s*$/)) { return; } @@ -32,18 +28,18 @@ module.exports = { } if (/^(group)\s([A-z|0-9|\/]*)\s{$/.test(line)) { // start of a group object current_group = true; - match = /^(group)\s([A-z|0-9|\/]*)\s{$/.exec(line); + const match = /^(group)\s([A-z|0-9|\/]*)\s{$/.exec(line); current_group_name = match[2]; current_group_section = {}; - if (list_of_group_sections[name1 = `${current_group_name}`] == null) { - list_of_group_sections[name1] = {}; + if (list_of_group_sections[current_group_name] == null) { + list_of_group_sections[current_group_name] = {}; } } if (/^(default)\s{$/.test(line)) { // start of a special group object named default current_group = true; current_group_name = ''; current_group_section = {}; - return list_of_group_sections[name2 = `${current_group_name}`] != null ? list_of_group_sections[name2] : list_of_group_sections[name2] = {}; + list_of_group_sections[current_group_name] ??= {}; } } else { // we are parsing a mount object @@ -56,10 +52,7 @@ module.exports = { } else { // add the line to mont object line = line.replace(';', ''); - sep = '='; - if (line.indexOf(':') !== -1) { - sep = ':'; - } + const sep = line.indexOf(':') !== -1 ? 
':' : '='; line = line.split(sep); current_mount_section.push({ type: `${line[0].trim()}`, @@ -74,23 +67,23 @@ module.exports = { if (/^(\s*)?}$/.test(line)) { if (current_group) { if (current_group_controller) { - return current_group_controller = false; + current_group_controller = false; } else if (current_group_perm) { if (current_group_perm_content) { - return current_group_perm_content = false; + current_group_perm_content = false; } else { - return current_group_perm = false; + current_group_perm = false; } } else { current_group = false; // push the group if the closing bracket is closing a group // list_of_group_sections["#{current_group_name}"] = current_group_section - return current_group_section = null; + current_group_section = null; } } } else { //closing the group object - match = /^\s*(cpuset|cpu|cpuacct|blkio|memory|devices|freezer|net_cls|perf_event|net_prio|hugetlb|pids|rdma)\s{$/.exec(line); + const match = /^\s*(cpuset|cpu|cpuacct|blkio|memory|devices|freezer|net_cls|perf_event|net_prio|hugetlb|pids|rdma)\s{$/.exec(line); if (!current_group_perm && !current_group_controller) { //if neither working in perm or controller section, we are declaring one of them if (/^\s*perm\s{$/.test(line)) { // perm declaration @@ -102,31 +95,27 @@ module.exports = { current_group_controller = true; current_controller_name = match[1]; current_group_section[`${current_controller_name}`] = {}; - return (base = list_of_group_sections[`${current_group_name}`])[current_controller_name] != null ? base[current_controller_name] : base[current_controller_name] = {}; + list_of_group_sections[`${current_group_name}`][current_controller_name] ??= {}; } } else if (current_group_perm && current_group_perm_content) { // perm config line = line.replace(';', ''); line = line.split('='); - [type, value] = line; + const [type, value] = line; current_group_section['perm'][current_group_section_perm_name][type.trim()] = value.trim(); - return list_of_group_sections[`${current_group_name}`]['perm'][current_group_section_perm_name][type.trim()] = value.trim(); + list_of_group_sections[`${current_group_name}`]['perm'][current_group_section_perm_name][type.trim()] = value.trim(); } else if (current_group_controller) { // controller config line = line.replace(';', ''); - sep = '='; - if (line.indexOf(':') !== -1) { - sep = ':'; - } - line = line.split(sep); - [type, value] = line; - return (base1 = list_of_group_sections[`${current_group_name}`][`${current_controller_name}`])[name4 = type.trim()] != null ? base1[name4] : base1[name4] = value.trim(); + const sep = line.indexOf(':') !== -1 ? 
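Throughout the cgconfig parser above, the decaffeinated base[name] != null ? base[name] : (base[name] = {}) pattern is replaced by the nullish assignment operator ??=, which assigns only when the property is null or undefined. A minimal illustration:

const sections = {};
const name = "default";

// Old generated form:
//   sections[name] != null ? sections[name] : (sections[name] = {});
sections[name] ??= {};                // creates the entry, it was missing
sections[name] ??= { ignored: true }; // no-op, the entry already exists

console.log(sections); // { default: {} }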
':' : '='; + const [type, value] = line.split(sep); + list_of_group_sections[`${current_group_name}`][`${current_controller_name}`][type.trim()] ??= value.trim() } else { - match_admin = /^\s*(admin|task)\s{$/.exec(line); + const match_admin = /^\s*(admin|task)\s{$/.exec(line); if (match_admin) { // admin or task declaration - [_, name] = match_admin; //the name is either admin or task + const [_, name] = match_admin; //the name is either admin or task current_group_perm_content = true; current_group_section_perm_name = name; current_group_section['perm'][name] = {}; - return list_of_group_sections[`${current_group_name}`]['perm'][name] = {}; + list_of_group_sections[`${current_group_name}`]['perm'][name] = {}; } } } @@ -139,7 +128,6 @@ module.exports = { }; }, stringify: function(obj, config = {}) { - var i, indent, j, k, l, len, mount, name, ref, ref1, val; if (obj.mounts == null) { obj.mounts = []; } @@ -149,23 +137,18 @@ module.exports = { if (config.indent == null) { config.indent = 2; } - indent = ''; - for (i = j = 1, ref = config.indent; (1 <= ref ? j <= ref : j >= ref); i = 1 <= ref ? ++j : --j) { - indent += ' '; - } + let indent = ' '.repeat(config.indent); const sections = []; if (obj.mounts.length !== 0) { let mount_render = "mount {\n"; - ref1 = obj.mounts; - for (k = l = 0, len = ref1.length; l < len; k = ++l) { - mount = ref1[k]; + for (const mount of obj.mounts) { mount_render += `${indent}${mount.type} = ${mount.path};\n`; } mount_render += '}'; sections.push(mount_render); } let count = 0; - for (name in obj.groups) { + for (const name in obj.groups) { const group = obj.groups[name]; let group_render = (name === '') || (name === 'default') ? 'default {\n' : `group ${name} {\n`; for (const key in group) { @@ -175,7 +158,7 @@ module.exports = { if (value['admin'] != null) { group_render += `${indent}${indent}admin {\n`; for (const prop in value['admin']) { - val = value['admin'][prop]; + const val = value['admin'][prop]; group_render += `${indent}${indent}${indent}${prop} = ${val};\n`; } group_render += `${indent}${indent}}\n`; diff --git a/packages/system/lib/utils/index.js b/packages/system/lib/utils/index.js index ebfdae806..627f67152 100644 --- a/packages/system/lib/utils/index.js +++ b/packages/system/lib/utils/index.js @@ -1,10 +1,9 @@ -// Generated by CoffeeScript 2.7.0 -var utils; +import utils from "@nikitajs/core/utils"; +import cgconfig from "@nikitajs/system/utils/cgconfig"; +import tmpfs from "@nikitajs/system/utils/tmpfs"; -utils = require('@nikitajs/core/lib/utils'); - -module.exports = { +export default { ...utils, - cgconfig: require('./cgconfig'), - tmpfs: require('./tmpfs') + cgconfig: cgconfig, + tmpfs: tmpfs, }; diff --git a/packages/system/lib/utils/tmpfs.js b/packages/system/lib/utils/tmpfs.js index cec48e36e..c0318b3e6 100644 --- a/packages/system/lib/utils/tmpfs.js +++ b/packages/system/lib/utils/tmpfs.js @@ -1,52 +1,46 @@ -// Generated by CoffeeScript 2.7.0 + // parse the content of tmpfs daemon configuration file -var string; +import string from "@nikitajs/core/utils/string"; + +const properties = ['type', 'mount', 'perm', 'uid', 'gid', 'age', 'argu'] -string = require('@nikitajs/core/lib/utils/string'); +const parse = function (str) { + const files = {}; + string.lines(str).forEach(function(line, _, __) { + if (!line || line.match(/^#.*$/)) { + return; + } + const values = line.split(/\s+/); + const [, mount] = values; + const record = {}; + for (const i in properties) { + const property = properties[i]; + const value = values[i]; + 
record[property] = value === '-' ? undefined : value; + } + files[mount] = record; + }); + return files; +}; -module.exports = { - parse: function(str) { - var files, lines; - lines = string.lines(str); - files = {}; - lines.forEach(function(line, _, __) { - var age, argu, gid, i, key, mode, mount, obj, ref, results, type, uid, values; - if (!line || line.match(/^#.*$/)) { - return; - } - values = [type, mount, mode, uid, gid, age, argu] = line.split(/\s+/); - obj = {}; - ref = ['type', 'mount', 'perm', 'uid', 'gid', 'age', 'argu']; - results = []; - for (i in ref) { - key = ref[i]; - obj[key] = values[i] !== void 0 ? values[i] : '-'; - if (i === `${values.length - 1}`) { - if (obj['mount'] != null) { - results.push(files[mount] = obj); - } else { - results.push(void 0); - } - } else { - results.push(void 0); - } - } - return results; - }); - return files; - }, - stringify: function(obj) { - var i, k, key, lines, ref, v; - lines = []; - for (k in obj) { - v = obj[k]; - ref = ['mount', 'perm', 'uid', 'gid', 'age', 'argu']; - for (i in ref) { - key = ref[i]; - v[key] = v[key] !== void 0 ? v[key] : '-'; - } - lines.push(`${v.type} ${v.mount} ${v.perm} ${v.uid} ${v.gid} ${v.age} ${v.argu}`); +const stringify = function (record) { + const lines = []; + for (const v of Object.values(record)) { + const keys = ["mount", "perm", "uid", "gid", "age", "argu"]; + for (const key of keys) { + v[key] = v[key] !== undefined ? v[key] : "-"; } - return lines.join('\n'); + lines.push( + `${v.type} ${v.mount} ${v.perm} ${v.uid} ${v.gid} ${v.age} ${v.argu}` + ); } + return lines.join("\n"); +}; + +export { parse, properties, stringify }; + +export default { + parse: parse, + properties: properties, + stringify: stringify, }; diff --git a/packages/system/package.json b/packages/system/package.json index 637901e5e..ab1452756 100644 --- a/packages/system/package.json +++ b/packages/system/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/system", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various system management operations.", "keywords": [ "nikita", @@ -16,7 +17,6 @@ "limits", "mod" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -58,20 +58,26 @@ "engines": { "node": ">= 10.0.0" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "files": [ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/system/lib/register" - ], "inline-diffs": true, - "timeout": 20000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/system/register", + "should" + ], + "throw-deprecation": true, + "timeout": 20000 }, "publishConfig": { "access": "public" @@ -87,5 +93,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/system/test.sample.coffee b/packages/system/test.sample.coffee index 35f630e31..664e6ddb8 100644 --- a/packages/system/test.sample.coffee +++ b/packages/system/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: api: true posix: true @@ -21,5 +21,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: 
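The rewritten utils/tmpfs.js above exposes parse, properties and stringify both as named exports and through a default export, and the new "exports" map in package.json ("./utils/*": "./lib/utils/*.js") lets consumers import them without the /lib/ prefix. A short usage sketch of the resulting import surface; the sample tmpfiles.d line is made up:

// Both entry points resolve through the package.json "exports" map.
import tmpfs from "@nikitajs/system/utils/tmpfs";
import { parse, stringify } from "@nikitajs/system/utils/tmpfs";

// Parse one tmpfiles.d line into a record keyed by mount point.
const files = parse("d /var/run/example 0755 root root 10d -");
console.log(files["/var/run/example"].perm); // "0755"

// The default export bundles the same functions.
console.log(stringify(files) === tmpfs.stringify(files)); // true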
"#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/system/test/cgroups.coffee b/packages/system/test/cgroups.coffee index 4e7002649..4e64d0cf8 100644 --- a/packages/system/test/cgroups.coffee +++ b/packages/system/test/cgroups.coffee @@ -1,12 +1,12 @@ -utils = require '../lib/utils' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.system_cgroups +import nikita from '@nikitajs/core' +import utils from '@nikitajs/system/utils' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.cgroups', -> + return unless test.tags.system_cgroups describe 'generate without merge', -> @@ -50,7 +50,7 @@ describe 'system.cgroups', -> mounts: mounts merge:false $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file_mount_only.cgconfig.conf" content: """ mount { @@ -73,7 +73,7 @@ describe 'system.cgroups', -> groups: groups merge: false $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file_cgroup_only.cgconfig.conf" content: """ group toto { @@ -106,7 +106,7 @@ describe 'system.cgroups', -> default: def merge: false $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file_default_only.cgconfig.conf" content: """ default { @@ -141,7 +141,7 @@ describe 'system.cgroups', -> mounts: mounts merge: false $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/a_file_complete.cgconfig.conf" content: """ mount { @@ -192,7 +192,7 @@ describe 'system.cgroups', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @system.cgroups + await @system.cgroups target: "#{tmpdir}/a_file_complete.cgconfig.conf" mode: 0o0754 default: def @@ -264,7 +264,7 @@ describe 'system.cgroups', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @system.cgroups + await @system.cgroups target: "#{tmpdir}/a_file_merge_mount_groups.cgconfig.conf" mode: 0o0754 groups: groups diff --git a/packages/system/test/group/index.coffee b/packages/system/test/group/index.coffee index c251313c0..ef8bab3f7 100644 --- a/packages/system/test/group/index.coffee +++ b/packages/system/test/group/index.coffee @@ -1,19 +1,19 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.system_group +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.group', -> + return unless test.tags.system_group they 'accept only user name', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' {$status} = await @system.group 'toto' $status.should.be.true() {$status} = await @system.group 'toto' @@ -24,8 +24,8 @@ describe 'system.group', -> $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' {$status} = await @system.group 'toto', gid: '1234' $status.should.be.true() {$status} = await @system.group 'toto', gid: '1234' @@ -37,8 +37,8 @@ describe 'system.group', -> nikita $ssh: ssh , -> - @system.group.remove 'toto' - @system.group 'toto', gid: '' + await @system.group.remove 'toto' + await @system.group 'toto', gid: 
'' .should.be.rejectedWith code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' diff --git a/packages/system/test/group/read.coffee b/packages/system/test/group/read.coffee index 6019370ef..2ebc0652d 100644 --- a/packages/system/test/group/read.coffee +++ b/packages/system/test/group/read.coffee @@ -1,19 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.group.read', -> describe 'with option `target`', -> - return unless tags.posix + return unless test.tags.posix they 'shy doesnt modify the status', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/group" content: """ root:x:0:root @@ -28,7 +29,7 @@ describe 'system.group.read', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/group" content: """ root:x:0:root @@ -41,15 +42,15 @@ describe 'system.group.read', -> bin: group: 'bin', password: 'x', gid: 1, users: [ 'root', 'bin', 'daemon' ] describe 'without option `target`', -> - return unless tags.system_user + return unless test.tags.system_user they 'use `getent` without target', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.group.remove 'toto' - @system.group + await @system.group.remove 'toto' + await @system.group name: 'toto' gid: 1010 {group} = await @system.group.read @@ -62,14 +63,14 @@ describe 'system.group.read', -> @system.group.remove 'toto' describe 'option "gid"', -> - return unless tags.posix + return unless test.tags.posix they 'map a username to group record', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/group" content: """ root:x:0:root @@ -86,7 +87,7 @@ describe 'system.group.read', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/group" content: """ root:x:0:root diff --git a/packages/system/test/group/remove.coffee b/packages/system/test/group/remove.coffee index 791f11b17..c0554c6ff 100644 --- a/packages/system/test/group/remove.coffee +++ b/packages/system/test/group/remove.coffee @@ -1,20 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.system_group +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.group.remove', -> + return unless test.tags.system_group they 'handle status', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' - @system.group 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' + await @system.group 'toto' {$status} = await @system.group.remove 'toto' $status.should.be.true() {$status} = await @system.group.remove 'toto' diff --git a/packages/system/test/info/disks.coffee b/packages/system/test/info/disks.coffee index 28ef63095..bbb5a849e 100644 --- a/packages/system/test/info/disks.coffee +++ b/packages/system/test/info/disks.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.system_info_disks +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = 
mochaThey(test.config) describe 'system.info.disks', -> + return unless test.tags.system_info_disks they 'with no options', ({ssh}) -> nikita diff --git a/packages/system/test/info/os.coffee b/packages/system/test/info/os.coffee index 8588007d8..272d3c5e0 100644 --- a/packages/system/test/info/os.coffee +++ b/packages/system/test/info/os.coffee @@ -1,20 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, expect} = require '../test' -they = require('mocha-they')(config) - -return unless tags.system_info_os +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.info.os', -> + return unless test.tags.system_info_os they 'default options', ({ssh}) -> nikita $ssh: ssh , -> - {stdout: expect.os.linux_version} = await @execute 'uname -r', trim: true unless expect.os.linux_version + {stdout: test.expect.os.linux_version} = await @execute 'uname -r', trim: true unless test.expect.os.linux_version {os} = await @system.info.os() Object.keys(os).sort().should.eql [ 'arch', 'distribution', 'linux_version', 'version' ] - Object.keys(os).sort().should.eql Object.keys(expect.os).sort() - os.should.match expect.os + Object.keys(os).sort().should.eql Object.keys(test.expect.os).sort() + os.should.match test.expect.os diff --git a/packages/system/test/limits.coffee b/packages/system/test/limits.coffee index b31f8e31f..845db59f8 100644 --- a/packages/system/test/limits.coffee +++ b/packages/system/test/limits.coffee @@ -1,13 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.limits', -> describe 'schema', -> - - return unless tags.api + return unless test.tags.api it 'system or user is required', -> nikita @@ -37,8 +37,7 @@ describe 'system.limits', -> code: 'NIKITA_SCHEMA_VALIDATION_CONFIG' describe 'usage', -> - - return unless tags.system_limits + return unless test.tags.system_limits they 'do nothing without any limits', ({ssh}) -> nikita @@ -116,7 +115,7 @@ describe 'system.limits', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @system.limits + await @system.limits target: "#{tmpdir}/limits.conf" user: 'me' nofile: 2048 @@ -134,7 +133,7 @@ describe 'system.limits', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @system.limits + await @system.limits target: "#{tmpdir}/limits.conf" user: 'me' nofile: 2048 @@ -152,7 +151,7 @@ describe 'system.limits', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @fs.assert + await @fs.assert target: '/proc/sys/fs/file-max' {stdout: nofile} = await @execute command: 'cat /proc/sys/fs/file-max' @@ -168,7 +167,7 @@ describe 'system.limits', -> nofile: true nproc: true $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/limits.conf" content: """ me - nofile #{nofile} @@ -177,15 +176,14 @@ describe 'system.limits', -> """ describe 'system values', -> - - return unless tags.system_limits + return unless test.tags.system_limits they 'raise an error if nofile is too high', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @system.limits + await @system.limits target: "#{tmpdir}/limits.conf" user: 'me' nofile: 10000000000000000000 @@ -197,7 +195,7 @@ describe 'system.limits', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @system.limits + await @system.limits 
target: "#{tmpdir}/limits.conf" user: 'me' nproc: 1000000000 @@ -209,7 +207,7 @@ describe 'system.limits', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> - @system.limits + await @system.limits target: "#{tmpdir}/limits.conf" user: 'me' nproc: @@ -229,7 +227,7 @@ describe 'system.limits', -> nofile: 2048 nproc: 'unlimited' $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/limits.conf" content: """ me - nofile 2048 diff --git a/packages/system/test/mod.coffee b/packages/system/test/mod.coffee index 6e79c28dd..5031bcd3b 100644 --- a/packages/system/test/mod.coffee +++ b/packages/system/test/mod.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.mod', -> + return unless test.tags.posix they 'activate a module', ({ssh}) -> nikita @@ -22,7 +22,7 @@ describe 'system.mod', -> modules: 'module_a' target: "#{tmpdir}/mods/modules.conf" $status.should.be.false() - @fs.assert + await @fs.assert target: "#{tmpdir}/mods/modules.conf" content: "module_a\n" @@ -45,7 +45,7 @@ describe 'system.mod', -> 'module_b': true target: "#{tmpdir}/mods/modules.conf" $status.should.be.false() - @fs.assert + await @fs.assert target: "#{tmpdir}/mods/modules.conf" content: "module_a\nmodule_b\n" @@ -69,6 +69,6 @@ describe 'system.mod', -> 'module_c': true target: "#{tmpdir}/mods/modules.conf" $status.should.be.true() - @fs.assert + await @fs.assert target: "#{tmpdir}/mods/modules.conf" content: "module_a\nmodule_c\n" diff --git a/packages/system/test/running.coffee b/packages/system/test/running.coffee index 5aa8d756d..711fdef3c 100644 --- a/packages/system/test/running.coffee +++ b/packages/system/test/running.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.running', -> + return unless test.tags.posix they 'pid not running', ({ssh}) -> {$logs, running} = await nikita({ssh: ssh}).system.running @@ -52,7 +52,7 @@ describe 'system.running', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/pid.lock" content: '9999999' {$logs, running} = await @system.running diff --git a/packages/system/test/test.coffee b/packages/system/test/test.coffee index f17f2b1bb..ccc602447 100644 --- a/packages/system/test/test.coffee +++ b/packages/system/test/test.coffee @@ -1,14 +1,24 @@ -fs = require 'fs' +import fs from 'node:fs/promises' +import * as url from 'node:url' +dirname = new URL( '.', import.meta.url).pathname + +exists = (path) -> + try + await fs.access path, fs.constants.F_OK + true + catch + false + # Write default configuration if not process.env['NIKITA_TEST_MODULE'] and ( - not fs.existsSync("#{__dirname}/../test.js") and - not fs.existsSync("#{__dirname}/../test.json") and - not fs.existsSync("#{__dirname}/../test.coffee") + not await exists("#{dirname}/../test.js") and + not await exists("#{dirname}/../test.json") and + not await exists("#{dirname}/../test.coffee") ) - config = fs.readFileSync "#{__dirname}/../test.sample.coffee" - fs.writeFileSync "#{__dirname}/../test.coffee", config + config = await fs.readFile 
"#{dirname}/../test.sample.coffee" + await fs.writeFile "#{dirname}/../test.coffee", config # Read configuration -config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee" +config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee") # Export configuration -module.exports = config +export default config.default diff --git a/packages/system/test/tmpfs.coffee b/packages/system/test/tmpfs.coffee index 313e19820..07b6de7e0 100644 --- a/packages/system/test/tmpfs.coffee +++ b/packages/system/test/tmpfs.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.system_tmpfs +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.tmpfs', -> + return unless test.tags.system_tmpfs describe 'generate without merge', -> @@ -26,12 +26,12 @@ describe 'system.tmpfs', -> merge: false $status.should.be.true() await @execute - command: " if [ -d \"/var/run/file_1\" ] ; then exit 0; else exit 1; fi" + command: 'if [ -d "/var/run/file_1" ] ; then exit 0; else exit 1; fi' await @fs.assert target: "#{tmpdir}/file_1.conf" - content: """ + content: ''' d /var/run/file_1 0644 root root 10s - - """ + ''' they 'status not modified', ({ssh, sudo}) -> nikita @@ -59,13 +59,13 @@ describe 'system.tmpfs', -> perm: '0644' merge: false $status.should.be.false() - @execute - command: " if [ -d \"/var/run/file_1\" ] ; then exit 0; else exit 1; fi" - @fs.assert + await @execute + command: 'if [ -d "/var/run/file_1" ] ; then exit 0; else exit 1; fi' + await @fs.assert target: "#{tmpdir}/file_1.conf" - content: """ + content: ''' d /var/run/file_1 0644 root root 10s - - """ + ''' they 'override existing configuration file with target', ({ssh, sudo}) -> nikita @@ -73,7 +73,7 @@ describe 'system.tmpfs', -> $sudo: sudo $tmpdir: true , ({metadata: {tmpdir}})-> - @fs.remove + await @fs.remove target: "#{tmpdir}/file_1.conf" {$status} = await @system.tmpfs target: "#{tmpdir}/file_1.conf" @@ -95,13 +95,13 @@ describe 'system.tmpfs', -> perm: '0644' merge: false $status.should.be.true() - @execute - command: " if [ -d \"/var/run/file_2\" ] ; then exit 0; else exit 1; fi" - @fs.assert + await @execute + command: 'if [ -d "/var/run/file_2" ] ; then exit 0; else exit 1; fi' + await @fs.assert target: "#{tmpdir}/file_1.conf" - content: """ + content: ''' d /var/run/file_2 0644 root root 10s - - """ + ''' describe 'generate with merge', -> @@ -111,7 +111,7 @@ describe 'system.tmpfs', -> $sudo: sudo $tmpdir: true , ({metadata: {tmpdir}})-> - @fs.remove + await @fs.remove target: "#{tmpdir}/file_2.conf" {$status} = await @system.tmpfs target: "#{tmpdir}/file_2.conf" @@ -133,19 +133,18 @@ describe 'system.tmpfs', -> perm: '0644' merge: true $status.should.be.true() - @execute - command: " if [ -d \"/var/run/file_1\" ] ; then exit 0; else exit 1; fi" - @execute - command: " if [ -d \"/var/run/file_2\" ] ; then exit 0; else exit 1; fi" - @fs.assert + await @execute + command: 'if [ -d "/var/run/file_1" ] ; then exit 0; else exit 1; fi' + await @execute + command: 'if [ -d "/var/run/file_2" ] ; then exit 0; else exit 1; fi' + await @fs.assert target: "#{tmpdir}/file_2.conf" - content: """ + content: ''' d /var/run/file_2 0644 root root 10s - d /var/run/file_1 0644 root root 10s - - """ + ''' they 'multiple file merge status not modifed with target', ({ssh, sudo}) -> - return if ssh nikita $ssh: ssh $sudo: sudo @@ -189,10 
+188,10 @@ describe 'system.tmpfs', -> command: " if [ -d \"/var/run/file_2\" ] ; then exit 0; else exit 1; fi" await @fs.assert target: "#{tmpdir}/file_2.conf" - content: """ + content: ''' d /var/run/file_2 0644 root root 10s - d /var/run/file_1 0644 root root 10s - - """ + ''' describe 'default target Centos/Redhat 7', -> @@ -201,7 +200,7 @@ describe 'system.tmpfs', -> $ssh: ssh $sudo: sudo , -> - @fs.remove + await @fs.remove target: "/etc/tmpfiles.d/root.conf" {$status} = await @system.tmpfs mount: '/var/run/file_1' @@ -212,18 +211,18 @@ describe 'system.tmpfs', -> perm: '0644' merge: false $status.should.be.true() - @execute - command: " if [ -d \"/var/run/file_1\" ] ; then exit 0; else exit 1; fi" - @fs.assert + await @execute + command: 'if [ -d "/var/run/file_1" ] ; then exit 0; else exit 1; fi' + await @fs.assert target: '/etc/tmpfiles.d/root.conf' - content: "d /var/run/file_1 0644 root root 10s -" + content: 'd /var/run/file_1 0644 root root 10s -' they 'simple mount group no uid', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @fs.remove '/etc/tmpfiles.d/root.conf' + await @fs.remove '/etc/tmpfiles.d/root.conf' {$status} = await @system.tmpfs mount: '/var/run/file_1' uid: 'root' @@ -233,8 +232,8 @@ describe 'system.tmpfs', -> perm: '0644' merge: false $status.should.be.true() - @execute - command: " if [ -d \"/var/run/file_1\" ] ; then exit 0; else exit 1; fi" - @fs.assert + await @execute + command: 'if [ -d "/var/run/file_1" ] ; then exit 0; else exit 1; fi' + await @fs.assert target: '/etc/tmpfiles.d/root.conf' - content: "d /var/run/file_1 0644 root root 10s -" + content: 'd /var/run/file_1 0644 root root 10s -' diff --git a/packages/system/test/uid_gid.coffee b/packages/system/test/uid_gid.coffee index 689a365fb..c2b93de92 100644 --- a/packages/system/test/uid_gid.coffee +++ b/packages/system/test/uid_gid.coffee @@ -1,23 +1,23 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.system_user +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.uid_gid', -> + return unless test.tags.system_user they 'convert names to id', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove ['toto', 'lulu'] - @system.group.remove 'lulu' - @system.group 'toto', gid: 1234 - @system.group 'lulu', gid: 1235 - @system.user 'toto', uid: 1234, gid: 1235 + await @system.user.remove 'toto' + await @system.group.remove ['toto', 'lulu'] + await @system.group.remove 'lulu' + await @system.group 'toto', gid: 1234 + await @system.group 'lulu', gid: 1235 + await @system.user 'toto', uid: 1234, gid: 1235 {uid, gid, default_gid} = await @system.uid_gid uid: 'toto' gid: 'toto' @@ -30,14 +30,14 @@ describe 'system.uid_gid', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/group" content: """ root:x:0:root bin:x:1:root,bin,daemon users:x:994:monsieur """ - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ root:x:0:0:root:/root:/bin/bash diff --git a/packages/system/test/user/index.coffee b/packages/system/test/user/index.coffee index 035026ef2..d424ffee2 100644 --- a/packages/system/test/user/index.coffee +++ b/packages/system/test/user/index.coffee @@ -1,12 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from 
'@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.user', -> describe 'schema', -> - return unless tags.api + return unless test.tags.api it 'config.shell', -> {shell} = await nikita.system.user @@ -25,15 +26,15 @@ describe 'system.user', -> shell.should.eql '/sbin/nologin' describe 'usage', -> - return unless tags.system_user + return unless test.tags.system_user they 'accept only user name', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' {$status} = await @system.user 'toto' $status.should.be.true() {$status} = await @system.user 'toto' @@ -44,8 +45,8 @@ describe 'system.user', -> $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' {$status} = await @system.user 'toto', uid: 1234 $status.should.be.true() {$status} = await @system.user 'toto', uid: 1235 @@ -60,8 +61,8 @@ describe 'system.user', -> $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' {$status} = await @system.user 'toto' $status.should.be.true() {$status} = await @system.user 'toto', uid: 1235 @@ -81,7 +82,7 @@ describe 'system.user', -> {$status} = await @system.user 'toto', home: "#{tmpdir}/toto/subdir" $status.should.be.true() - @fs.assert "#{tmpdir}/toto", + await @fs.assert "#{tmpdir}/toto", mode: [0o0644, 0o0755] uid: 0 gid: 0 diff --git a/packages/system/test/user/read.coffee b/packages/system/test/user/read.coffee index 11cbc08b4..268e269ed 100644 --- a/packages/system/test/user/read.coffee +++ b/packages/system/test/user/read.coffee @@ -1,20 +1,20 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.user.read', -> describe 'with option `target`', -> - return unless tags.posix + return unless test.tags.posix they 'shy doesnt modify the status', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ root:x:0:0:root:/root:/bin/bash @@ -29,7 +29,7 @@ describe 'system.user.read', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ root:x:0:0:root:/root:/bin/bash @@ -42,16 +42,16 @@ describe 'system.user.read', -> bin: user: 'bin', uid: 1, gid: 1, comment: 'bin', home: '/bin', shell: '/usr/bin/nologin' describe 'without option `target`', -> - return unless tags.system_user + return unless test.tags.system_user they 'use `getent` without target', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' - @system.user + await @system.user.remove 'toto' + await @system.group.remove 'toto' + await @system.user name: 'toto' system: true uid: 1010 @@ -67,14 +67,14 @@ describe 'system.user.read', -> @system.group.remove 'toto' describe 'option "uid"', -> - return unless tags.posix + return unless test.tags.posix they 'map a username to group record', ({ssh}) -> nikita $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ 
root:x:0:root @@ -91,7 +91,7 @@ describe 'system.user.read', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ root:x:0:0:root:/root:/bin/bash @@ -108,13 +108,13 @@ describe 'system.user.read', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ root:x:0:root bin:x:1:root,bin,daemon """ - @system.user.read + await @system.user.read target: "#{tmpdir}/etc/passwd" uid: 'nobody' .should.be.rejectedWith @@ -125,13 +125,13 @@ describe 'system.user.read', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}})-> - @file + await @file target: "#{tmpdir}/etc/passwd" content: """ root:x:0:root bin:x:1:root,bin,daemon """ - @system.user.read + await @system.user.read target: "#{tmpdir}/etc/passwd" uid: '99' .should.be.rejectedWith diff --git a/packages/system/test/user/remove.coffee b/packages/system/test/user/remove.coffee index a28707fe5..3d4ce3ba9 100644 --- a/packages/system/test/user/remove.coffee +++ b/packages/system/test/user/remove.coffee @@ -1,13 +1,13 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'system.user.remove', -> describe 'schema', -> - - return unless tags.api + return unless test.tags.api it 'default argument', -> {config} = await nikita.system.user.remove 'toto', ({config}) -> @@ -15,17 +15,16 @@ describe 'system.user.remove', -> config.name.should.eql 'toto' describe 'usage', -> - - return unless tags.system_user + return unless test.tags.system_user they 'handle status', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - @system.user.remove 'toto' - @system.group.remove 'toto' - @system.user 'toto' + await @system.user.remove 'toto' + await @system.group.remove 'toto' + await @system.user 'toto' {$status} = await @system.user.remove 'toto' $status.should.be.true() {$status} = await @system.user.remove 'toto' diff --git a/packages/system/test/utils/tmpfs.coffee b/packages/system/test/utils/tmpfs.coffee new file mode 100644 index 000000000..1f9da9e29 --- /dev/null +++ b/packages/system/test/utils/tmpfs.coffee @@ -0,0 +1,54 @@ + +import { parse, stringify } from '@nikitajs/system/utils/tmpfs' +import test from '../test.coffee' + +describe 'system.utils.tmpfs', -> + return unless test.tags.api + + it 'parse', -> + parse ''' + d /run/user 0755 root root 10d - + L /tmp/foobar - - - - /dev/null + ''' + .should.eql + '/run/user': + type: 'd', + mount: '/run/user', + perm: '0755', + uid: 'root', + gid: 'root', + age: '10d', + argu: undefined + '/tmp/foobar': + type: 'L', + mount: '/tmp/foobar', + perm: undefined, + uid: undefined, + gid: undefined, + age: undefined, + argu: '/dev/null' + + it 'stringify', -> + stringify + '/run/user': + type: 'd', + mount: '/run/user', + perm: '0755', + uid: 'root', + gid: 'root', + age: '10d', + argu: undefined + '/tmp/foobar': + type: 'L', + mount: '/tmp/foobar', + perm: undefined, + uid: undefined, + gid: undefined, + age: undefined, + argu: '/dev/null' + .should.eql ''' + d /run/user 0755 root root 10d - + L /tmp/foobar - - - - /dev/null + ''' + + \ No newline at end of file diff --git a/packages/tools/README.md b/packages/tools/README.md index b4747b733..7cb3fcfc2 100644 --- a/packages/tools/README.md +++ b/packages/tools/README.md @@ -2,3 +2,17 @@ # Nikita "tools" package The "tools" package 
provides Nikita actions for various CLI tools. + +## Usage + +```js +import "@nikitajs/tools/register"; +import nikita from "@nikitajs/core"; + +const {$status} = await nikita.tools.git({ + source: "/tmp/super_project.git", + target: "/tmp/super_project", + revision: "v0.0.1", +}); +console.info("Repository was modified:", $status); +``` diff --git a/packages/tools/env/cron/Dockerfile b/packages/tools/env/cron/Dockerfile index 39dddfeac..3ef8f8caf 100644 --- a/packages/tools/env/cron/Dockerfile +++ b/packages/tools/env/cron/Dockerfile @@ -1,19 +1,17 @@ FROM almalinux:8 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " # Install Node.js ENV NODE_VERSION stable -RUN \ - yum install -y git make \ - && curl -L https://git.io/n-install | bash -s -- -y \ - && . ~/.bashrc && n $NODE_VERSION +RUN yum install -y git make && \ + curl -L https://git.io/n-install | bash -s -- -y && \ + . ~/.bashrc && n $NODE_VERSION # Install SSH -RUN \ - yum install -y openssh-server openssh-clients \ - && ssh-keygen -t rsa -f ~/.ssh/id_rsa -N '' \ - && cat ~/.ssh/id_rsa.pub > ~/.ssh/authorized_keys \ - && ssh-keygen -A +RUN yum install -y openssh-server openssh-clients && \ + ssh-keygen -t rsa -f ~/.ssh/id_rsa -N '' && \ + cat ~/.ssh/id_rsa.pub > ~/.ssh/authorized_keys && \ + ssh-keygen -A # Install openssl # RUN yum install -y openssl diff --git a/packages/tools/env/cron/test.coffee b/packages/tools/env/cron/test.coffee index 52752714d..d2204faa5 100644 --- a/packages/tools/env/cron/test.coffee +++ b/packages/tools/env/cron/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: tools_cron: true config: [ diff --git a/packages/tools/env/dconf/Dockerfile b/packages/tools/env/dconf/Dockerfile index 57111d10c..9c9addff1 100644 --- a/packages/tools/env/dconf/Dockerfile +++ b/packages/tools/env/dconf/Dockerfile @@ -1,23 +1,22 @@ FROM ubuntu:22.04 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " LABEL project=nikita \ - project.tests="tools.dconf" + project.tests="tools.dconf" # Install Node.js ENV NODE_VERSION stable -RUN \ - apt update -y \ - && apt install -y build-essential curl git \ - && curl -L https://git.io/n-install | bash -s -- -y \ - && /root/n/bin/n $NODE_VERSION +RUN apt update -y && \ + apt install -y build-essential curl git iputils-ping && \ + curl -L https://git.io/n-install | bash -s -- -y && \ + /root/n/bin/n $NODE_VERSION # Install SSH -RUN apt-get install -y openssh-server \ - && ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' \ - && cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys \ - && ssh-keygen -A \ - && mkdir -p /run/sshd +RUN apt-get install -y openssh-server && \ + ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys && \ + ssh-keygen -A && \ + mkdir -p /run/sshd # Install dbus-x11 required by dbus-launch in entrypoint.sh RUN apt install -y dbus-x11 diff --git a/packages/tools/env/dconf/test.coffee b/packages/tools/env/dconf/test.coffee index aec5589ea..545dea9cf 100644 --- a/packages/tools/env/dconf/test.coffee +++ b/packages/tools/env/dconf/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: tools_dconf: true config: [ diff --git a/packages/tools/env/iptables/index.coffee b/packages/tools/env/iptables/index.coffee deleted file mode 100644 index db8652013..000000000 --- a/packages/tools/env/iptables/index.coffee +++ /dev/null @@ -1,48 +0,0 @@ - -path = require 'path' -runner = require '@nikitajs/lxd-runner' - -runner - cwd: '/nikita/packages/tools' 
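The CoffeeScript env runners deleted here are recreated below as ESM JavaScript. One recurring detail of that conversion is that `__dirname` is not defined in ES modules; as an illustrative sketch only (standard Node.js APIs, no Nikita-specific code), the replacement boils down to:

```js
// ESM provides no __dirname; derive the directory from import.meta.url instead.
import path from "node:path";
import { fileURLToPath } from "node:url";

// Roughly equivalent to the `new URL('.', import.meta.url).pathname` form used in this patch.
const dirname = path.dirname(fileURLToPath(import.meta.url));

// Resolve paths relative to the current module, e.g. the runner log directory.
console.info(path.resolve(dirname, "./logs"));
```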
- container: 'nikita-tools-iptables' - logdir: path.resolve __dirname, './logs' - cluster: - containers: - 'nikita-tools-iptables': - image: 'images:almalinux/8' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/tools/env/iptables/test.coffee' - 'raw.idmap': if process.env['NIKITA_LXD_IN_VAGRANT'] - then 'both 1000 0' - else "both #{process.getuid()} 0" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Node.js' - container: config.container - command: ''' - yum install -y tar - if command -v node ; then exit 42; fi - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 16 - ''' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'SSH keys' - container: config.container - command: """ - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! -f /root/.ssh/id_ed25519 ]; then - ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' - cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys - fi - """ - trap: true -.catch (err) -> - console.error err diff --git a/packages/tools/env/iptables/index.js b/packages/tools/env/iptables/index.js new file mode 100644 index 000000000..691eb5fd1 --- /dev/null +++ b/packages/tools/env/iptables/index.js @@ -0,0 +1,58 @@ + +import path from 'node:path'; +import dedent from 'dedent'; +import runner from '@nikitajs/lxd-runner'; +const dirname = new URL( '.', import.meta.url).pathname + +runner({ + cwd: '/nikita/packages/tools', + container: 'nikita-tools-iptables', + logdir: path.resolve(dirname, './logs'), + cluster: { + containers: { + 'nikita-tools-iptables': { + image: 'images:almalinux/8', + properties: { + 'environment.NIKITA_TEST_MODULE': '/nikita/packages/tools/env/iptables/test.coffee', + 'raw.idmap': process.env['NIKITA_LXD_IN_VAGRANT'] ? 'both 1000 0' : `both ${process.getuid()} 0` + }, + disk: { + nikitadir: { + path: '/nikita', + source: process.env['NIKITA_HOME'] || path.join(dirname, '../../../../') + } + }, + ssh: { + enabled: true + } + } + }, + provision_container: async function({config}) { + await this.lxc.exec({ + $header: 'Node.js', + container: config.container, + command: dedent` + yum install -y tar + if command -v node ; then exit 42; fi + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . ~/.bashrc + nvm install 16 + `, + trap: true, + code: [0, 42] + }); + await this.lxc.exec({ + $header: 'SSH keys', + container: config.container, + command: dedent` + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! 
-f /root/.ssh/id_ed25519 ]; then + ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' + cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys + fi + `, + trap: true + }); + } + } +}); diff --git a/packages/tools/env/iptables/test.coffee b/packages/tools/env/iptables/test.coffee index 0a7270dc5..05c7b4afb 100644 --- a/packages/tools/env/iptables/test.coffee +++ b/packages/tools/env/iptables/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: tools_iptables: true config: [ @@ -7,6 +7,6 @@ module.exports = , label: 'remote' ssh: - host: '127.0.0.1', username: process.env.USER, + host: '127.0.0.1', username: process.env.USER private_key_path: '~/.ssh/id_ed25519' ] diff --git a/packages/tools/env/npm/index.coffee b/packages/tools/env/npm/index.coffee deleted file mode 100644 index 860c72d3e..000000000 --- a/packages/tools/env/npm/index.coffee +++ /dev/null @@ -1,48 +0,0 @@ - -path = require 'path' -runner = require '@nikitajs/lxd-runner' - -runner - cwd: '/nikita/packages/tools' - container: 'nikita-tools-npm' - logdir: path.resolve __dirname, './logs' - cluster: - containers: - 'nikita-tools-npm': - image: 'images:almalinux/8' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/tools/env/npm/test.coffee' - 'raw.idmap': if process.env['NIKITA_LXD_IN_VAGRANT'] - then 'both 1000 0' - else "both #{process.getuid()} 0" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Node.js' - container: config.container - command: ''' - yum install -y tar - if command -v node ; then exit 42; fi - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 20 - ''' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'SSH keys' - container: config.container - command: """ - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! -f /root/.ssh/id_ed25519 ]; then - ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' - cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys - fi - """ - trap: true -.catch (err) -> - console.error err diff --git a/packages/tools/env/npm/index.js b/packages/tools/env/npm/index.js new file mode 100644 index 000000000..1f5041cfb --- /dev/null +++ b/packages/tools/env/npm/index.js @@ -0,0 +1,58 @@ + +import path from 'node:path'; +import dedent from 'dedent'; +import runner from '@nikitajs/lxd-runner'; +const __dirname = new URL( '.', import.meta.url).pathname + +runner({ + cwd: '/nikita/packages/tools', + container: 'nikita-tools-npm', + logdir: path.resolve(__dirname, './logs'), + cluster: { + containers: { + 'nikita-tools-npm': { + image: 'images:almalinux/8', + properties: { + 'environment.NIKITA_TEST_MODULE': '/nikita/packages/tools/env/npm/test.coffee', + 'raw.idmap': process.env['NIKITA_LXD_IN_VAGRANT'] ? 'both 1000 0' : `both ${process.getuid()} 0` + }, + disk: { + nikitadir: { + path: '/nikita', + source: process.env['NIKITA_HOME'] || path.join(__dirname, '../../../../') + } + }, + ssh: { + enabled: true + } + } + }, + provision_container: async function({config}) { + await this.lxc.exec({ + $header: 'Node.js', + container: config.container, + command: dedent` + yum install -y tar + if command -v node ; then exit 42; fi + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . 
~/.bashrc + nvm install 20 + `, + trap: true, + code: [0, 42] + }); + await this.lxc.exec({ + $header: 'SSH keys', + container: config.container, + command: dedent` + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! -f /root/.ssh/id_ed25519 ]; then + ssh-keygen -t ed25519 -f /root/.ssh/id_ed25519 -N '' + cat /root/.ssh/id_ed25519.pub > /root/.ssh/authorized_keys + fi + `, + trap: true + }); + } + } +}); diff --git a/packages/tools/env/npm/test.coffee b/packages/tools/env/npm/test.coffee index 4d793782b..cacf152f5 100644 --- a/packages/tools/env/npm/test.coffee +++ b/packages/tools/env/npm/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: tools_npm: true config: [ @@ -7,6 +7,6 @@ module.exports = , label: 'remote' ssh: - host: '127.0.0.1', username: process.env.USER, + host: '127.0.0.1', username: process.env.USER private_key_path: '~/.ssh/id_ed25519' ] diff --git a/packages/tools/env/repo-alma8/Dockerfile b/packages/tools/env/repo-alma8/Dockerfile index 9807f1add..50993ec3a 100644 --- a/packages/tools/env/repo-alma8/Dockerfile +++ b/packages/tools/env/repo-alma8/Dockerfile @@ -1,15 +1,15 @@ FROM almalinux:8 -MAINTAINER David Worms +LABEL org.opencontainers.image.authors="David Worms " RUN \ - # Install Node dependencies - yum install -y git make && \ - # Install SSH and sudo - yum install -y openssh-server openssh-clients sudo && \ - ssh-keygen -A && \ - # Install package dependencies - # java - yum install -y openssl git zip unzip bzip2 + # Install Node dependencies + yum install -y git make && \ + # Install SSH and sudo + yum install -y openssh-server openssh-clients sudo && \ + ssh-keygen -A && \ + # Install package dependencies + # java + yum install -y openssl git zip unzip bzip2 RUN yum clean all @@ -19,10 +19,10 @@ WORKDIR /nikita/packages/tools # Sudo User RUN useradd nikita -d /home/nikita && \ - mkdir -p /home/nikita && \ - chown nikita /home/nikita && \ - chmod 700 /home/nikita && \ - echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita USER nikita # Install Node.js @@ -32,7 +32,7 @@ RUN \ . ~/.bashrc && n $NODE_VERSION RUN \ - ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ - cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/tools/env/repo-alma8/docker-compose.yml b/packages/tools/env/repo-alma8/docker-compose.yml index 903cd895b..6b0911f48 100644 --- a/packages/tools/env/repo-alma8/docker-compose.yml +++ b/packages/tools/env/repo-alma8/docker-compose.yml @@ -2,9 +2,9 @@ services: nodejs: build: . 
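The test configuration modules in this patch switch from CommonJS exports to a default export. Expressed in plain JavaScript (the actual files stay in CoffeeScript, so treat this only as a sketch of the same change):

```js
// Before: CommonJS
// module.exports = { tags: { tools_npm: true }, config: [{ label: "local" }] };

// After: ESM default export, as used by the converted test.coffee files
export default {
  tags: { tools_npm: true },
  config: [{ label: "local" }],
};
```

Consumers then load it with `import test from './test.coffee'` and gate suites on `test.tags`, matching the pattern applied to the system package tests above.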
- image: nikita_tools_centos7 - container_name: nikita_tools_centos7 - platform: linux/amd64 # MongoDB repo used in tests only available on x64 arch + image: nikita_tools_repo_alma8 + container_name: nikita_tools_repo_alma8 + # platform: linux/amd64 # MongoDB repo used in tests only available on x64 arch volumes: - ../../../../:/nikita environment: diff --git a/packages/tools/env/repo-alma8/test.coffee b/packages/tools/env/repo-alma8/test.coffee index 5fa124ab9..61fb9f3fa 100644 --- a/packages/tools/env/repo-alma8/test.coffee +++ b/packages/tools/env/repo-alma8/test.coffee @@ -1,7 +1,10 @@ -module.exports = +export default tags: tools_repo: true + mariadb: + distrib: 'almalinux8' + basearch: '$arch' config: [ label: 'local' sudo: true diff --git a/packages/tools/env/repo-rocky9/Dockerfile b/packages/tools/env/repo-rocky9/Dockerfile new file mode 100644 index 000000000..d7533f2b8 --- /dev/null +++ b/packages/tools/env/repo-rocky9/Dockerfile @@ -0,0 +1,38 @@ +FROM rockylinux:9 +LABEL org.opencontainers.image.authors="David Worms " + +RUN \ + # Install Node dependencies + yum install -y git make && \ + # Install SSH and sudo + yum install -y openssh-server openssh-clients sudo && \ + ssh-keygen -A && \ + # Install package dependencies + # java + yum install -y openssl git zip unzip bzip2 + +RUN yum clean all + +ADD ./entrypoint.sh /entrypoint.sh +RUN mkdir -p /nikita +WORKDIR /nikita/packages/tools + +# Sudo User +RUN useradd nikita -d /home/nikita && \ + mkdir -p /home/nikita && \ + chown nikita /home/nikita && \ + chmod 700 /home/nikita && \ + echo 'nikita ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nikita +USER nikita + +# Install Node.js +ENV NODE_VERSION stable +RUN \ + curl -L https://git.io/n-install | bash -s -- -y && \ + . ~/.bashrc && n $NODE_VERSION + +RUN \ + ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -N '' && \ + cat ~/.ssh/id_ed25519.pub > ~/.ssh/authorized_keys + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/tools/env/repo-rocky9/docker-compose.yml b/packages/tools/env/repo-rocky9/docker-compose.yml new file mode 100644 index 000000000..baaf6b8f4 --- /dev/null +++ b/packages/tools/env/repo-rocky9/docker-compose.yml @@ -0,0 +1,11 @@ + +services: + nodejs: + build: . 
+ image: nikita_tools_repo_rocky9 + container_name: nikita_tools_repo_rocky9 + # platform: linux/amd64 # MongoDB repo used in tests only available on x64 arch + volumes: + - ../../../../:/nikita + environment: + NIKITA_TEST_MODULE: /nikita/packages/tools/env/repo-rocky9/test.coffee diff --git a/packages/service/env/ubuntu/entrypoint.sh b/packages/tools/env/repo-rocky9/entrypoint.sh similarity index 100% rename from packages/service/env/ubuntu/entrypoint.sh rename to packages/tools/env/repo-rocky9/entrypoint.sh diff --git a/packages/tools/env/repo-rocky9/run.sh b/packages/tools/env/repo-rocky9/run.sh new file mode 100755 index 000000000..3eaa8bd58 --- /dev/null +++ b/packages/tools/env/repo-rocky9/run.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +cd `pwd`/`dirname ${BASH_SOURCE}` +docker compose up --abort-on-container-exit diff --git a/packages/tools/env/repo-rocky9/test.coffee b/packages/tools/env/repo-rocky9/test.coffee new file mode 100644 index 000000000..5e61ec008 --- /dev/null +++ b/packages/tools/env/repo-rocky9/test.coffee @@ -0,0 +1,17 @@ + +export default + tags: + tools_repo: true + mariadb: + distrib: 'almalinux9' + basearch: '$arch' + config: [ + label: 'local' + sudo: true + , + label: 'remote' + sudo: true + ssh: + host: '127.0.0.1', username: process.env.USER + private_key_path: '~/.ssh/id_ed25519' + ] diff --git a/packages/tools/env/rubygems/index.coffee b/packages/tools/env/rubygems/index.coffee deleted file mode 100644 index d9e5f1296..000000000 --- a/packages/tools/env/rubygems/index.coffee +++ /dev/null @@ -1,56 +0,0 @@ - -path = require 'path' -runner = require '@nikitajs/lxd-runner' - -runner - cwd: '/nikita/packages/tools' - container: 'nikita-tools-rubygems' - logdir: path.resolve __dirname, './logs' - cluster: - containers: - 'nikita-tools-rubygems': - image: 'images:almalinux/8' - properties: - 'environment.NIKITA_TEST_MODULE': '/nikita/packages/tools/env/rubygems/test.coffee' - 'raw.idmap': if process.env['NIKITA_LXD_IN_VAGRANT'] - then 'both 1000 0' - else "both #{process.getuid()} 0" - disk: - nikitadir: - path: '/nikita' - source: process.env['NIKITA_HOME'] or path.join(__dirname, '../../../../') - ssh: enabled: true - provision_container: ({config}) -> - await @lxc.exec - $header: 'Node.js' - container: config.container - command: ''' - yum install -y tar - if command -v node ; then exit 42; fi - curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash - . ~/.bashrc - nvm install 16 - ''' - trap: true - code: [0, 42] - await @lxc.exec - $header: 'SSH keys' - container: config.container - command: """ - mkdir -p /root/.ssh && chmod 700 /root/.ssh - if [ ! 
-f /root/.ssh/id_rsa ]; then - ssh-keygen -t rsa -f /root/.ssh/id_rsa -N '' - cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys - fi - """ - trap: true - await @lxc.exec - $header: 'Ruby' - container: config.container - command: """ - yum install -y gcc ruby ruby-devel - """ - trap: true - code: [0, 42] -.catch (err) -> - console.error err diff --git a/packages/tools/env/rubygems/index.js b/packages/tools/env/rubygems/index.js new file mode 100644 index 000000000..1fa55a72c --- /dev/null +++ b/packages/tools/env/rubygems/index.js @@ -0,0 +1,65 @@ + +import path from 'node:path'; +import dedent from 'dedent'; +import runner from '@nikitajs/lxd-runner'; +const __dirname = new URL( '.', import.meta.url).pathname + +runner({ + cwd: '/nikita/packages/tools', + container: 'nikita-tools-rubygems', + logdir: path.resolve(__dirname, './logs'), + cluster: { + containers: { + 'nikita-tools-rubygems': { + image: 'images:almalinux/8', + properties: { + 'environment.NIKITA_TEST_MODULE': '/nikita/packages/tools/env/rubygems/test.coffee', + 'raw.idmap': process.env['NIKITA_LXD_IN_VAGRANT'] ? 'both 1000 0' : `both ${process.getuid()} 0` + }, + disk: { + nikitadir: { + path: '/nikita', + source: process.env['NIKITA_HOME'] || path.join(__dirname, '../../../../') + } + }, + ssh: { + enabled: true + } + } + }, + provision_container: async function({config}) { + await this.lxc.exec({ + $header: 'Node.js', + container: config.container, + command: dedent` + yum install -y tar + if command -v node ; then exit 42; fi + curl -sS -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash + . ~/.bashrc + nvm install 16 + `, + trap: true, + code: [0, 42] + }); + await this.lxc.exec({ + $header: 'SSH keys', + container: config.container, + command: dedent` + mkdir -p /root/.ssh && chmod 700 /root/.ssh + if [ ! 
-f /root/.ssh/id_rsa ]; then + ssh-keygen -t rsa -f /root/.ssh/id_rsa -N '' + cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys + fi + `, + trap: true + }); + await this.lxc.exec({ + $header: 'Ruby', + container: config.container, + command: `yum install -y gcc ruby ruby-devel`, + trap: true, + code: [0, 42] + }); + } + } +}); diff --git a/packages/tools/env/rubygems/test.coffee b/packages/tools/env/rubygems/test.coffee index a4a078e74..af776342b 100644 --- a/packages/tools/env/rubygems/test.coffee +++ b/packages/tools/env/rubygems/test.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: tools_rubygems: true config: [ @@ -7,6 +7,6 @@ module.exports = , label: 'remote' ssh: - host: '127.0.0.1', username: process.env.USER, + host: '127.0.0.1', username: process.env.USER private_key_path: '~/.ssh/id_rsa' ] diff --git a/packages/tools/env/run.sh b/packages/tools/env/run.sh index bf45213a0..e4b1834ad 100755 --- a/packages/tools/env/run.sh +++ b/packages/tools/env/run.sh @@ -5,7 +5,7 @@ cd `pwd`/`dirname ${BASH_SOURCE}` ./cron/run.sh ./dconf/run.sh -# ./repo-centos/run.sh -npx coffee ./env/iptables/index.coffee run -npx coffee ./env/npm/index.coffee run -npx coffee ./env/rubygems/index.coffee run +./repo-alma8/run.sh +node ./iptables/index.js run +node ./npm/index.js run +node ./rubygems/index.js run diff --git a/packages/tools/lib/backup/index.js b/packages/tools/lib/backup/index.js index 671ffaf9c..6fe93b6bc 100644 --- a/packages/tools/lib/backup/index.js +++ b/packages/tools/lib/backup/index.js @@ -1,16 +1,14 @@ - // Dependencies -const definitions = require("./schema.json"); -const dayjs = require('dayjs'); -dayjs.extend(require('dayjs/plugin/utc')); -dayjs.extend(require('dayjs/plugin/timezone')); +import dayjs from "dayjs"; +import dayjsUtc from "dayjs/plugin/utc.js"; +import dayjsTimezone from "dayjs/plugin/timezone.js"; +import definitions from "./schema.json" assert { type: "json" }; +dayjs.extend(dayjsUtc); +dayjs.extend(dayjsTimezone); // Action -module.exports = { - handler: async function({ - config, - tools: {log, path} - }) { +export default { + handler: async function ({ config, tools: { log, path } }) { let filename = dayjs(); if (config.local) { filename = filename.locale(config.locale); @@ -25,48 +23,42 @@ module.exports = { } else { filename = filename.toISOString(); } - const compress = config.compress === true ? 'tgz' : config.compress; + const compress = config.compress === true ? 
"tgz" : config.compress; if (compress) { filename = `${filename}.${compress}`; } const target = `${config.target}/${config.name}/${filename}`; - log({ - message: `Source is ${JSON.stringify(config.source)}`, - level: 'INFO' - }); - log({ - message: `Target is ${JSON.stringify(target)}`, - level: 'INFO' - }); + log("INFO", `Source is ${JSON.stringify(config.source)}`); + log("INFO", `Target is ${JSON.stringify(target)}`); await this.fs.mkdir(`${path.dirname(target)}`); if (config.source && !config.compress) { await this.fs.copy({ source: `${config.source}`, - target: `${target}` + target: `${target}`, }); } if (config.source && config.compress) { await this.tools.compress({ format: `${compress}`, source: `${config.source}`, - target: `${target}` + target: `${target}`, }); } if (config.command) { await this.execute({ - command: `${config.command} > ${target}` + command: `${config.command} > ${target}`, }); } return { base_dir: config.target, name: config.name, filename: filename, - target: target + target: target, }; }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; // ## Dependencies diff --git a/packages/tools/lib/backup/schema.json b/packages/tools/lib/backup/schema.json index 94efc4545..08df474d3 100644 --- a/packages/tools/lib/backup/schema.json +++ b/packages/tools/lib/backup/schema.json @@ -21,7 +21,7 @@ "compress": { "oneOf": [ { - "$ref": "module://@nikitajs/tools/lib/compress#/definitions/config/properties/format" + "$ref": "module://@nikitajs/tools/compress#/definitions/config/properties/format" }, { "type": "boolean" diff --git a/packages/tools/lib/compress/index.js b/packages/tools/lib/compress/index.js index 228d1c148..db805b2fe 100644 --- a/packages/tools/lib/compress/index.js +++ b/packages/tools/lib/compress/index.js @@ -1,7 +1,7 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; /** ## Extention to type @@ -26,7 +26,7 @@ const ext_to_type = function(name, path) { }; // Action -module.exports = { +export default { handler: async function({ config, tools: {path} diff --git a/packages/tools/lib/cron/add/index.js b/packages/tools/lib/cron/add/index.js index 296caf055..af109c55e 100644 --- a/packages/tools/lib/cron/add/index.js +++ b/packages/tools/lib/cron/add/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require("dedent"); -const utils = require("../../utils"); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import utils from "@nikitajs/tools/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { const crontab = (() => { if (config.user != null) { diff --git a/packages/tools/lib/cron/remove/index.js b/packages/tools/lib/cron/remove/index.js index 73c0b0223..cc1b7f83f 100644 --- a/packages/tools/lib/cron/remove/index.js +++ b/packages/tools/lib/cron/remove/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const utils = require('../../utils'); -const definitions = require('./schema.json'); +import dedent from "dedent"; +import utils from "@nikitajs/tools/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} diff --git a/packages/tools/lib/dconf/index.js b/packages/tools/lib/dconf/index.js index 036e9f810..000efd905 100644 --- a/packages/tools/lib/dconf/index.js +++ 
b/packages/tools/lib/dconf/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // ## Exports -module.exports = { +export default { handler: async function({config}) { // Normalize properties for (const key in config.properties) { diff --git a/packages/tools/lib/extract/index.js b/packages/tools/lib/extract/index.js index 6b85026e7..cfb9100f7 100644 --- a/packages/tools/lib/extract/index.js +++ b/packages/tools/lib/extract/index.js @@ -1,10 +1,10 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require('../utils'); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/tools/utils"; // Action -module.exports = { +export default { handler: async function({ config, tools: {log, path} diff --git a/packages/tools/lib/git/index.js b/packages/tools/lib/git/index.js index c4dc2d50c..2ee22951b 100644 --- a/packages/tools/lib/git/index.js +++ b/packages/tools/lib/git/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({ config, tools: {path} diff --git a/packages/tools/lib/gsettings/index.js b/packages/tools/lib/gsettings/index.js index ae7ef6d12..44fb9c24c 100644 --- a/packages/tools/lib/gsettings/index.js +++ b/packages/tools/lib/gsettings/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require("dedent"); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.argument != null) { config.properties = config.argument; diff --git a/packages/tools/lib/iptables/README.md b/packages/tools/lib/iptables/README.md index 2dcac61e1..a2b66acb5 100644 --- a/packages/tools/lib/iptables/README.md +++ b/packages/tools/lib/iptables/README.md @@ -13,7 +13,7 @@ Iptables rules are only inserted if the service is started on the target system. ## Usage -Iptables comes with many modules. Each of them which must be specifically +Iptables comes with many modules. Each of them must be specifically integrated to the parser part of this code. For this reason, we could only integrate a limited set of modules and more are added based on usages. 
Supported modules are: @@ -35,7 +35,7 @@ modules are: ## Example ```js -var after = {chain: 'INPUT', jump: 'ACCEPT', 'in-interface': 'lo'} +const after = {chain: 'INPUT', jump: 'ACCEPT', 'in-interface': 'lo'} const {$status} = await nikita.tools.iptables({ rules: [ chain: 'INPUT', after: after, jump: 'ACCEPT', dport: 22, protocol: 'tcp' @@ -43,3 +43,8 @@ const {$status} = await nikita.tools.iptables({ }) console.info(`Iptables was updated: ${$status}`) ``` + +## Command references + +List rules in readable format: `iptables -L --line-numbers -nv` +List rules in save format: `iptables -S -v` diff --git a/packages/tools/lib/iptables/index.js b/packages/tools/lib/iptables/index.js index 3ef64fb65..67f0944ca 100644 --- a/packages/tools/lib/iptables/index.js +++ b/packages/tools/lib/iptables/index.js @@ -1,71 +1,43 @@ - // Dependencies -const definitions = require("./schema.json"); - -// ## Hooks -var handler, on_action, utils; - -on_action = function({config}) { - if (!Array.isArray(config.rules)) { - return config.rules = [config.rules]; - } -}; - -// ## Schema definitions - -// ## Handler -handler = async function({ - config, - tools: {log} - }) { - var $status, command, newrules, oldrules, stdout; - log({ - message: "List existing rules", - level: 'WARN' - }); - ({$status} = (await this.service.status({ - name: 'iptables' - }))); - if (!$status) { - throw Error("Service iptables not started"); - } - ({stdout} = (await this.execute({ - $shy: true, - command: 'iptables -S', - sudo: config.sudo - }))); - oldrules = utils.iptables.parse(stdout); - newrules = utils.iptables.normalize(config.rules); - command = utils.iptables.command(oldrules, newrules); - if (!command.length) { - return; - } - log({ - message: `${command.length} modified rules`, - level: 'WARN' - }); - return (await this.execute({ - command: `${command.join('; ')}; service iptables save;`, - sudo: config.sudo, - trap: true - })); -}; - -// ## Exports -module.exports = { - handler: handler, +import utils from "@nikitajs/tools/utils"; +import definitions from "./schema.json" assert { type: "json" }; + +// Action +export default { + handler: async function ({ config, tools: { log } }) { + log("WARN", "List existing rules"); + const { started } = await this.service.status({ + name: "iptables", + }); + if (!started) { + throw Error("Service iptables not started"); + } + const { stdout } = await this.execute({ + $shy: true, + command: "iptables -S", + sudo: config.sudo, + }); + const oldrules = utils.iptables.parse(stdout); + const newrules = utils.iptables.normalize(config.rules); + const command = utils.iptables.command(oldrules, newrules); + if (!command.length) { + return; + } + log("WARN", `${command.length} modified rules`); + await this.execute({ + command: `${command.join("; ")}; service iptables save;`, + sudo: config.sudo, + trap: true, + }); + }, hooks: { - on_action: on_action + on_action: function ({ config }) { + if (!Array.isArray(config.rules)) { + return (config.rules = [config.rules]); + } + }, }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; - -// ## Dependencies -utils = require('../utils'); - -// ## IPTables References - -// List rules in readable format: `iptables -L --line-numbers -nv` -// List rules in save format: `iptables -S -v` diff --git a/packages/tools/lib/iptables/schema.json b/packages/tools/lib/iptables/schema.json index 8ac5f679d..40cb964f4 100644 --- a/packages/tools/lib/iptables/schema.json +++ b/packages/tools/lib/iptables/schema.json @@ -10,7 +10,7 @@ 
"description": "One or more objects containing iptables rule definitions." }, "sudo": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/sudo" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/sudo" } }, "required": [ diff --git a/packages/tools/lib/npm/index.js b/packages/tools/lib/npm/index.js index f2facd957..6cbc1a89f 100644 --- a/packages/tools/lib/npm/index.js +++ b/packages/tools/lib/npm/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // Upgrade await this.tools.npm.upgrade({ diff --git a/packages/tools/lib/npm/list/index.js b/packages/tools/lib/npm/list/index.js index 85f1f9840..7f010924f 100644 --- a/packages/tools/lib/npm/list/index.js +++ b/packages/tools/lib/npm/list/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {stdout} = await this.execute({ command: ['npm list', '--json', config.global ? '--global' : void 0].join(' '), diff --git a/packages/tools/lib/npm/list/schema.json b/packages/tools/lib/npm/list/schema.json index 1077f0e95..2293f2118 100644 --- a/packages/tools/lib/npm/list/schema.json +++ b/packages/tools/lib/npm/list/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "cwd": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/cwd" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/cwd" }, "global": { "type": "boolean", diff --git a/packages/tools/lib/npm/outdated/index.js b/packages/tools/lib/npm/outdated/index.js index 4f3411b57..13b677667 100644 --- a/packages/tools/lib/npm/outdated/index.js +++ b/packages/tools/lib/npm/outdated/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require('./schema.json'); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { const {stdout} = (await this.execute({ command: ['npm outdated', '--json', config.global ? '--global' : void 0].join(' '), diff --git a/packages/tools/lib/npm/outdated/schema.json b/packages/tools/lib/npm/outdated/schema.json index 1077f0e95..2293f2118 100644 --- a/packages/tools/lib/npm/outdated/schema.json +++ b/packages/tools/lib/npm/outdated/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "cwd": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/cwd" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/cwd" }, "global": { "type": "boolean", diff --git a/packages/tools/lib/npm/schema.json b/packages/tools/lib/npm/schema.json index d4ca052b8..a2c2e50d2 100644 --- a/packages/tools/lib/npm/schema.json +++ b/packages/tools/lib/npm/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "cwd": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/cwd" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/cwd" }, "global": { "type": "boolean", @@ -18,7 +18,7 @@ "description": "Name of the package(s) to install or upgrade if config \"upgrade\" is\n\"true\"." 
}, "sudo": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/sudo" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/sudo" }, "upgrade": { "default": false, diff --git a/packages/tools/lib/npm/uninstall/index.js b/packages/tools/lib/npm/uninstall/index.js index 04d65b89a..179fa6b5e 100644 --- a/packages/tools/lib/npm/uninstall/index.js +++ b/packages/tools/lib/npm/uninstall/index.js @@ -1,47 +1,38 @@ // Dependencies -const definitions = require('./schema.json'); - - -// ## Handler -handler = async function({ - config, - tools: {log} - }) { - const global = config.global ? '-g' : ''; - // Get installed packages - let installed = []; - const {stdout} = (await this.execute({ - $shy: true, - command: `npm list --json ${global}`, - code: [0, 1], - cwd: config.cwd, - stdout_log: false - })); - const pkgs = JSON.parse(stdout); - if (Object.keys(pkgs).length) { - installed = Object.keys(pkgs.dependencies); - } - // Uninstall - const uninstall = config.name.filter((pkg) => - installed.includes(pkg) - ); - if (!uninstall.length) { - return; - } - await this.execute({ - command: `npm uninstall ${global} ${uninstall.join(' ')}`, - cwd: config.cwd - }); - log({ - message: `NPM uninstalled packages: ${uninstall.join(', ')}` - }); -}; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: handler, +export default { + handler: async function ({ config, tools: { log } }) { + const global = config.global ? "-g" : ""; + // Get installed packages + let installed = []; + const { stdout } = await this.execute({ + $shy: true, + command: `npm list --json ${global}`, + code: [0, 1], + cwd: config.cwd, + stdout_log: false, + }); + const pkgs = JSON.parse(stdout); + if (Object.keys(pkgs).length) { + installed = Object.keys(pkgs.dependencies); + } + // Uninstall + const uninstall = config.name.filter((pkg) => installed.includes(pkg)); + if (!uninstall.length) { + return; + } + await this.execute({ + command: `npm uninstall ${global} ${uninstall.join(" ")}`, + cwd: config.cwd, + }); + log({ + message: `NPM uninstalled packages: ${uninstall.join(", ")}`, + }); + }, metadata: { - argument_to_config: 'name', - definitions: definitions - } + argument_to_config: "name", + definitions: definitions, + }, }; diff --git a/packages/tools/lib/npm/uninstall/schema.json b/packages/tools/lib/npm/uninstall/schema.json index 74646eae4..7ec5791e4 100644 --- a/packages/tools/lib/npm/uninstall/schema.json +++ b/packages/tools/lib/npm/uninstall/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "cwd": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/cwd" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/cwd" }, "name": { "type": "array", diff --git a/packages/tools/lib/npm/upgrade/index.js b/packages/tools/lib/npm/upgrade/index.js index 0daac1e86..7eaeb2622 100644 --- a/packages/tools/lib/npm/upgrade/index.js +++ b/packages/tools/lib/npm/upgrade/index.js @@ -1,8 +1,8 @@ // Dependencies -const definitions = require("./schema.json"); +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { log } }) { // Get outdated packages const { packages } = await this.tools.npm.outdated({ @@ -10,8 +10,8 @@ module.exports = { global: config.global, }); let outdated = Object.keys(packages).filter((name) => { - const package = packages[name]; - return 
package.current !== package.wanted; + const pck = packages[name]; + return pck.current !== pck.wanted; }); if (config.name) { const names = config.name.map((name) => name.split("@")[0]); diff --git a/packages/tools/lib/npm/upgrade/schema.json b/packages/tools/lib/npm/upgrade/schema.json index a21472ac3..176ce6a84 100644 --- a/packages/tools/lib/npm/upgrade/schema.json +++ b/packages/tools/lib/npm/upgrade/schema.json @@ -3,7 +3,7 @@ "type": "object", "properties": { "cwd": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/cwd" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/cwd" }, "global": { "type": "boolean", diff --git a/packages/tools/lib/register.js b/packages/tools/lib/register.js index 08907b4de..b6aad7da4 100644 --- a/packages/tools/lib/register.js +++ b/packages/tools/lib/register.js @@ -1,47 +1,40 @@ // Dependencies -const registry = require('@nikitajs/core/lib/registry'); -require('@nikitajs/file/lib/register'); -require('@nikitajs/service/lib/register'); +import registry from "@nikitajs/core/registry"; +import '@nikitajs/file/register'; +import '@nikitajs/service/register'; // Action registration -module.exports = { +const actions = { tools: { - backup: '@nikitajs/tools/lib/backup', - compress: '@nikitajs/tools/lib/compress', + backup: '@nikitajs/tools/backup', + compress: '@nikitajs/tools/compress', cron: { - add: '@nikitajs/tools/lib/cron/add', - remove: '@nikitajs/tools/lib/cron/remove' + add: '@nikitajs/tools/cron/add', + remove: '@nikitajs/tools/cron/remove' }, - extract: '@nikitajs/tools/lib/extract', - dconf: '@nikitajs/tools/lib/dconf', - iptables: '@nikitajs/tools/lib/iptables', - git: '@nikitajs/tools/lib/git', + extract: '@nikitajs/tools/extract', + dconf: '@nikitajs/tools/dconf', + iptables: '@nikitajs/tools/iptables', + git: '@nikitajs/tools/git', npm: { - '': '@nikitajs/tools/lib/npm', - list: '@nikitajs/tools/lib/npm/list', - outdated: '@nikitajs/tools/lib/npm/outdated', - uninstall: '@nikitajs/tools/lib/npm/uninstall', - upgrade: '@nikitajs/tools/lib/npm/upgrade' + '': '@nikitajs/tools/npm', + list: '@nikitajs/tools/npm/list', + outdated: '@nikitajs/tools/npm/outdated', + uninstall: '@nikitajs/tools/npm/uninstall', + upgrade: '@nikitajs/tools/npm/upgrade' }, - repo: '@nikitajs/tools/lib/repo', + repo: '@nikitajs/tools/repo', rubygems: { - 'fetch': '@nikitajs/tools/lib/rubygems/fetch', - 'install': '@nikitajs/tools/lib/rubygems/install', - 'remove': '@nikitajs/tools/lib/rubygems/remove' + 'fetch': '@nikitajs/tools/rubygems/fetch', + 'install': '@nikitajs/tools/rubygems/install', + 'remove': '@nikitajs/tools/rubygems/remove' }, ssh: { - keygen: '@nikitajs/tools/lib/ssh/keygen' + keygen: '@nikitajs/tools/ssh/keygen' }, - sysctl: '@nikitajs/tools/lib/sysctl' + sysctl: '@nikitajs/tools/sysctl' } }; -(async function() { - try { - return (await registry.register(module.exports)); - } catch (error) { - console.error(error.stack); - return process.exit(1); - } -})(); +await registry.register(actions); diff --git a/packages/tools/lib/repo/index.js b/packages/tools/lib/repo/index.js index 07af80374..09f393364 100644 --- a/packages/tools/lib/repo/index.js +++ b/packages/tools/lib/repo/index.js @@ -1,58 +1,42 @@ - // Dependencies -const dedent = require('dedent'); -const url = require('url'); -const utils = require('@nikitajs/file/lib/utils'); -const definitions = require("./schema.json"); +import each from "each"; +import dedent from "dedent"; +import url from "node:url"; +import utils from 
"@nikitajs/file/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { - handler: async function({ - config, - ssh, - tools: {log, path} - }) { - // Config normalisation - if (config.source != null) { - // TODO wdavidw 180115, target should be mandatory and not default to the source filename - if (config.target == null) { - config.target = path.resolve("/etc/yum.repos.d", path.basename(config.source)); - } +export default { + handler: async function ({ config, ssh, tools: { log, path, status } }) { + // TODO wdavidw 180115, target should be mandatory and not default to the source filename + if (config.source != null && config.target == null) { + config.target = path.resolve( + "/etc/yum.repos.d", + path.basename(config.source) + ); } - config.target = path.resolve('/etc/yum.repos.d', config.target); + // Unless absolute, path is relative to the default yum repo location + config.target = path.resolve("/etc/yum.repos.d", config.target); + // Globing expression relative to the parent target directory if (config.clean) { config.clean = path.resolve(path.dirname(config.target), config.clean); } - // Variable initiation - let $status = false; - let remote_files = []; // Delete if (config.clean) { - log({ - message: "Searching repositories inside \"/etc/yum.repos.d/\"", - level: 'DEBUG', - module: 'nikita/lib/tools/repo' - }); - const {files} = await this.fs.glob(config.clean); - remote_files = (function() { - const results = []; - for (const file of files) { - if (file === config.target) { - continue; - } - results.push(file); - } - return results; - })(); + log("DEBUG", 'Searching repositories inside "/etc/yum.repos.d/"'); + const files = await this.fs + .glob(config.clean) + .then(({ files }) => files.filter((file) => file !== config.target)); + await this.fs.remove(files); } - await this.fs.remove(remote_files); // Use download unless we are over ssh, in such case, // the source default to target host unless local is provided - const isFile = config.source && url.parse(config.source).protocol === null; + const isFile = config.source && URL.canParse(config.source) === false; if ( config.source != null && - (!isFile || (ssh != null && config.local != null)) + (!isFile || (ssh != null && config.local === true)) ) { + // Source is a URL or it is imported from local host if there is an SSH connection await this.file.download({ cache: false, gid: config.gid, @@ -83,18 +67,15 @@ module.exports = { }); } // Parse the definition file - log(`Read GPG keys from ${config.target}`, { - level: 'DEBUG', - module: 'nikita/lib/tools/repo' - }); + log("DEBUG", `Read GPG keys from ${config.target}`); // Extract repo information from file const data = utils.ini.parse_multi_brackets( - ( - await this.fs.base.readFile({ + await this.fs.base + .readFile({ target: config.target, - encoding: 'utf8' + encoding: "utf8", }) - ).data + .then(({ data }) => data) ); // Extract repo IDs const repoids = Object.keys(data); @@ -119,41 +100,40 @@ module.exports = { } // Download GPG Keys if (config.verify) { - for (const gpgKey of gpgKeys) { - log(`Downloading GPG keys from ${gpgKey}`, { - level: 'DEBUG', - module: 'nikita/lib/tools/repo' - }); - ({$status} = await this.file.download({ + const areKeysUpdated = await each(gpgKeys, async (gpgKey) => { + log("DEBUG", `Downloading GPG key from ${gpgKey}`); + const { $status: isKeyUpdated } = await this.file.download({ + location: config.location, source: gpgKey, - target: `${config.gpg_dir}/${path.basename(gpgKey)}` - })); - 
({$status} = await this.execute({ - $if: $status, - command: `rpm --import ${config.gpg_dir}/${path.basename(gpgKey)}` - })); - } + target: `${config.gpg_dir}/${path.basename(gpgKey)}`, + }); + await this.execute({ + $if: isKeyUpdated, + command: `rpm --import ${config.gpg_dir}/${path.basename(gpgKey)}`, + }); + return isKeyUpdated; + }).then( statuses => statuses.some( status => status === true)); + // Clean Metadata + await this.execute({ + $if: path.relative("/etc/yum.repos.d", config.target) !== ".." && areKeysUpdated, + // wdavidw: 180114, was "yum clean metadata" + // explanation is provided in case of revert. + // expire-cache is much faster, it forces yum to go redownload the small + // repo files only, then if there's newer repo data, it will download it. + command: "yum clean expire-cache; yum repolist -y", + }); } - // Clean Metadata - ({$status} = await this.execute({ - $if: path.relative('/etc/yum.repos.d', config.target) !== '..' && $status, - // wdavidw: 180114, was "yum clean metadata" - // explanation is provided in case of revert. - // expire-cache is much faster, it forces yum to go redownload the small - // repo files only, then if there's newer repo data, it will download it. - command: 'yum clean expire-cache; yum repolist -y' - })); - if (config.update && $status) { + if (config.update && status()) { await this.execute({ command: dedent` - yum update -y --disablerepo=* --enablerepo='${repoids.join(',')}' + yum update -y --disablerepo=* --enablerepo='${repoids.join(",")}' yum repolist `, - trap: true + trap: true, }); } }, metadata: { - definitions: definitions - } + definitions: definitions, + }, }; diff --git a/packages/tools/lib/repo/schema.json b/packages/tools/lib/repo/schema.json index 1cb60f5d1..2ea4ed4c0 100644 --- a/packages/tools/lib/repo/schema.json +++ b/packages/tools/lib/repo/schema.json @@ -11,7 +11,7 @@ }, "clean": { "type": "string", - "description": "Globing expression used to match replaced files. When relative, the\npath is resolved to the parent target directory which is\n'/etc/yum.repos.d' when the target is a filename." + "description": "Globbing expression used to match replaced files. When relative, the path is relative to the target parent directory, e.g. `/etc/yum.repos.d` when the target is a filename." }, "gpg_dir": { "type": "string", @@ -20,11 +20,16 @@ }, "gpg_key": { "type": "string", - "description": "Import specified key into the gpg_dir specified, downloads\nthe file if it's an url." + "description": "Import the specified key into the gpg_dir directory; the file is downloaded if it is a URL." }, "local": { - "$ref": "module://@nikitajs/file/lib/index#/definitions/config/properties/local", - "default": false + "$ref": "module://@nikitajs/file#/definitions/config/properties/local", + "default": false, + "description": "Import the file from the local host to the remote host; only active if the source is a file and an SSH connection is used." + }, + "location": { + "$ref": "module://@nikitajs/file/download#/definitions/config/properties/location", + "default": true }, "source": { "type": "string", @@ -32,7 +37,7 @@ }, "target": { "type": "string", - "description": "Path of the repository definition file, relative to\n'/etc/yum.repos.d'." + "description": "Path of the repository definition file, relative to '/etc/yum.repos.d'."
}, "update": { "type": "boolean", @@ -42,7 +47,7 @@ "verify": { "type": "boolean", "default": true, - "description": "Download the PGP keys if it's enabled in the repo file, keys are by\ndefault placed inside \"/etc/pki/rpm-gpg\" defined by the gpg_dir option\nand the filename is derivated from the url." + "description": "Download the PGP keys if it's enabled in the repo file, keys are by default placed inside \"/etc/pki/rpm-gpg\" defined by the gpg_dir option and the filename is derivated from the url." } }, "oneOf": [ diff --git a/packages/tools/lib/rubygems/fetch/index.js b/packages/tools/lib/rubygems/fetch/index.js index 2f10f8ffa..5d6f0d340 100644 --- a/packages/tools/lib/rubygems/fetch/index.js +++ b/packages/tools/lib/rubygems/fetch/index.js @@ -1,10 +1,10 @@ // Dependencies -const path = require('path'); -const definitions = require("./schema.json"); +import path from 'node:path' +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { // Get version if (!config.version) { diff --git a/packages/tools/lib/rubygems/install/index.js b/packages/tools/lib/rubygems/install/index.js index a1b75914c..8e628cfd6 100644 --- a/packages/tools/lib/rubygems/install/index.js +++ b/packages/tools/lib/rubygems/install/index.js @@ -1,11 +1,11 @@ // Dependencies -const semver = require('semver'); -const utils = require('../../utils'); -const definitions = require("./schema.json"); +import semver from 'semver'; +import utils from "@nikitajs/tools/utils"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function ({ config, tools: { path } }) { const gems = {}; if (gems[config.name] == null) { diff --git a/packages/tools/lib/rubygems/install/schema.json b/packages/tools/lib/rubygems/install/schema.json index 8d958ef94..7e518170b 100644 --- a/packages/tools/lib/rubygems/install/schema.json +++ b/packages/tools/lib/rubygems/install/schema.json @@ -11,7 +11,7 @@ "description": "Pass flags to the compiler." 
}, "bash": { - "$ref": "module://@nikitajs/core/lib/actions/execute#/definitions/config/properties/bash" + "$ref": "module://@nikitajs/core/actions/execute#/definitions/config/properties/bash" }, "gem_bin": { "type": "string", diff --git a/packages/tools/lib/rubygems/remove/index.js b/packages/tools/lib/rubygems/remove/index.js index c414bc293..0ca24800c 100644 --- a/packages/tools/lib/rubygems/remove/index.js +++ b/packages/tools/lib/rubygems/remove/index.js @@ -1,10 +1,10 @@ // Dependencies -const dedent = require('dedent'); -const definitions = require("./schema.json"); +import dedent from "dedent"; +import definitions from "./schema.json" assert { type: "json" }; // Action -module.exports = { +export default { handler: async function({config}) { if (config.gem_bin == null) { config.gem_bin = 'gem'; diff --git a/packages/tools/lib/ssh/keygen/index.js b/packages/tools/lib/ssh/keygen/index.js index 0d69d7173..61919f33c 100644 --- a/packages/tools/lib/ssh/keygen/index.js +++ b/packages/tools/lib/ssh/keygen/index.js @@ -1,10 +1,10 @@ // Dependencies -const definitions = require("./schema.json"); -const esa = require('@nikitajs/core/lib/utils').string.escapeshellarg; +import definitions from "./schema.json" assert { type: "json" }; +import { escapeshellarg as esa } from "@nikitajs/core/utils/string"; // Action -module.exports = { +export default { handler: async function({ config, tools: {path} diff --git a/packages/tools/lib/sysctl/index.js b/packages/tools/lib/sysctl/index.js index e45b8d764..7629b9f77 100644 --- a/packages/tools/lib/sysctl/index.js +++ b/packages/tools/lib/sysctl/index.js @@ -1,10 +1,10 @@ // Dependencies -const definitions = require("./schema.json"); -const utils = require('../utils'); +import definitions from "./schema.json" assert { type: "json" }; +import utils from "@nikitajs/tools/utils"; // Action -module.exports = { +export default { handler: async function({ config, tools: {log} diff --git a/packages/tools/lib/utils/index.js b/packages/tools/lib/utils/index.js index d5ede7029..311bba2c0 100644 --- a/packages/tools/lib/utils/index.js +++ b/packages/tools/lib/utils/index.js @@ -1,9 +1,9 @@ -const utils = require('@nikitajs/core/lib/utils'); -const diff = require('@nikitajs/file/lib/utils/diff'); -const iptables = require('./iptables'); +import utils from "@nikitajs/core/utils"; +import { diff } from '@nikitajs/file/utils'; +import iptables from '@nikitajs/tools/utils/iptables'; -module.exports = { +export default { ...utils, diff: diff, iptables: iptables diff --git a/packages/tools/lib/utils/iptables.js b/packages/tools/lib/utils/iptables.js index 0ee759600..8fde903fe 100644 --- a/packages/tools/lib/utils/iptables.js +++ b/packages/tools/lib/utils/iptables.js @@ -1,16 +1,16 @@ +import array from "@nikitajs/core/utils/array"; +import string from "@nikitajs/core/utils/string"; +import jsesc from "jsesc"; +import { merge } from "mixme"; -const utils = require('@nikitajs/core/lib/utils'); -const jsesc = require('jsesc'); -const {merge} = require('mixme'); - -const equals = function(obj1, obj2, keys) { +const _equals = function (obj1, obj2, keys) { let keys1 = Object.keys(obj1); let keys2 = Object.keys(obj2); if (keys) { - keys1 = keys1.filter(function(k) { + keys1 = keys1.filter(function (k) { return keys.indexOf(k) !== -1; }); - keys2 = keys2.filter(function(k) { + keys2 = keys2.filter(function (k) { return keys.indexOf(k) !== -1; }); } else { @@ -28,411 +28,491 @@ const equals = function(obj1, obj2, keys) { return true; }; -const iptables = { - // add_properties: 
['target', 'protocol', 'dport', 'in-interface', 'out-interface', 'source', 'target'] - add_properties: ['--protocol', '--source', '---target', '--jump', '--goto', '--in-interface', '--out-interface', '--fragment', 'tcp|--source-port', 'tcp|--sport', 'tcp|--target-port', 'tcp|--dport', 'tcp|--tcp-flags', 'tcp|--syn', 'tcp|--tcp-option', 'udp|--source-port', 'udp|--sport', 'udp|--target-port', 'udp|--dport'], - // modify_properties: ['state', 'comment'] +const constants = { + add_properties: [ + "--protocol", + "--source", + "---target", + "--jump", + "--goto", + "--in-interface", + "--out-interface", + "--fragment", + "tcp|--source-port", + "tcp|--sport", + "tcp|--target-port", + "tcp|--dport", + "tcp|--tcp-flags", + "tcp|--syn", + "tcp|--tcp-option", + "udp|--source-port", + "udp|--sport", + "udp|--target-port", + "udp|--dport", + ], modify_properties: [ - '--set-counters', - '--log-level', - '--log-prefix', - '--log-tcp-sequence', - '--log-tcp-options', // LOG - '--log-ip-options', - '--log-uid', // LOG - 'state|--state', - 'comment|--comment', - 'limit|--limit' + "--set-counters", + "--log-level", + "--log-prefix", + "--log-tcp-sequence", + "--log-tcp-options", // LOG + "--log-ip-options", + "--log-uid", // LOG + "state|--state", + "comment|--comment", + "limit|--limit", ], - commands_arguments: { // Used to compute rulenum - '-A': ['chain'], - '-D': ['chain'], - '-I': ['chain'], - '-R': ['chain'], - '-N': ['chain'], - '-X': ['chain'], - '-P': ['chain', 'target'], - '-L': true, - '-S': true, - '-F': true, - '-Z': true, - '-E': true + // Used to compute rulenum + commands_arguments: { + "-A": ["chain"], + "-D": ["chain"], + "-I": ["chain"], + "-R": ["chain"], + "-N": ["chain"], + "-X": ["chain"], + "-P": ["chain", "target"], + "-L": true, + "-S": true, + "-F": true, + "-Z": true, + "-E": true, }, commands_inverted: { - '--append': '-A', - '--delete': '-D', - '--insert': '-I', - '--replace': '-R', - '--new-chain': '-N', - '--delete-chain': '-X', - '--policy': '-P', - '--list': '-L', - '--list-rules': '-S', - '--flush': '-F', - '--zero': '-Z', - '--rename-chain': '-E' + "--append": "-A", + "--delete": "-D", + "--insert": "-I", + "--replace": "-R", + "--new-chain": "-N", + "--delete-chain": "-X", + "--policy": "-P", + "--list": "-L", + "--list-rules": "-S", + "--flush": "-F", + "--zero": "-Z", + "--rename-chain": "-E", }, - // parameters: ['-p', '-s', '-d', '-j', '-g', '-i', '-o', '-f', '-c'] # , '--log-prefix' - // parameters_inverted: - // '--protocol': '-p', '--source': '-s', '--target': '-d', '--jump': '-j' - // '--goto': '-g', '--in-interface': '-i', '--out-interface': '-o', - // '--fragment': '-f', '--set-counters': '-c' parameters: [ - '--protocol', - '--source', - '--target', - '--jump', - '--goto', - '--in-interface', - '--out-interface', - '--fragment', - '--set-counters', - '--log-level', - '--log-prefix', - '--log-tcp-sequence', - '--log-tcp-options', // LOG - '--log-ip-options', - '--log-uid' // LOG + "--protocol", + "--source", + "--target", + "--jump", + "--goto", + "--in-interface", + "--out-interface", + "--fragment", + "--set-counters", + "--log-level", + "--log-prefix", + "--log-tcp-sequence", + "--log-tcp-options", // LOG + "--log-ip-options", + "--log-uid", // LOG ], parameters_inverted: { - '-p': '--protocol', - '-s': '--source', - '-d': '--target', - '-j': '--jump', - '-g': '--goto', - '-i': '--in-interface', - '-o': '--out-interface', - '-f': '--fragment', - '-c': '--set-counters' + "-p": "--protocol", + "-s": "--source", + "-d": "--target", + "-j": "--jump", + "-g": 
"--goto", + "-i": "--in-interface", + "-o": "--out-interface", + "-f": "--fragment", + "-c": "--set-counters", }, protocols: { - tcp: ['--source-port', '--sport', '--target-port', '--dport', '--tcp-flags', '--syn', '--tcp-option'], - udp: ['--source-port', '--sport', '--target-port', '--dport'], + tcp: [ + "--source-port", + "--sport", + "--target-port", + "--dport", + "--tcp-flags", + "--syn", + "--tcp-option", + ], + udp: ["--source-port", "--sport", "--target-port", "--dport"], udplite: [], icmp: [], esp: [], ah: [], sctp: [], - all: [] + all: [], }, modules: { - state: ['--state'], - comment: ['--comment'], - limit: ['--limit'] - }, - command_args: function(command, rule) { - for (const k in rule) { - const v = rule[k]; - if (['chain', 'rulenum', 'command'].indexOf(k) !== -1) { - continue; - } - if (v == null) { - continue; - } - const match = /^([\w]+)\|([-\w]+)$/.exec(k) - if (match) { - const module = match[1]; - const arg = match[2]; - command += ` -m ${module}`; - command += ` ${arg} ${v}`; - } else { - command += ` ${k} ${v}`; - } - } - return command; - }, - command_replace: function(rule) { - if (rule.rulenum == null) { - rule.rulenum = 1; + state: ["--state"], + comment: ["--comment"], + limit: ["--limit"], + } +} + + +const command_args = function (command, rule) { + for (const k in rule) { + const v = rule[k]; + if (["chain", "rulenum", "command"].indexOf(k) !== -1) { + continue; } - return iptables.command_args(`iptables -R ${rule.chain} ${rule.rulenum}`, rule); - }, - command_insert: function(rule) { - if (rule.rulenum == null) { - rule.rulenum = 1; + if (v == null) { + continue; } - return iptables.command_args(`iptables -I ${rule.chain} ${rule.rulenum}`, rule); - }, - command_append: function(rule) { - if (rule.rulenum == null) { - rule.rulenum = 1; + const match = /^([\w]+)\|([-\w]+)$/.exec(k); + if (match) { + const module = match[1]; + const arg = match[2]; + command += ` -m ${module}`; + command += ` ${arg} ${v}`; + } else { + command += ` ${k} ${v}`; } - return iptables.command_args(`iptables -A ${rule.chain}`, rule); - }, - command: function(oldrules, newrules) { - const commands = []; - const new_chains = []; - const old_chains = oldrules.map(function(oldrule) { + } + return command; +}; + +const command_replace = function (rule) { + if (rule.rulenum == null) { + rule.rulenum = 1; + } + return command_args( + `iptables -R ${rule.chain} ${rule.rulenum}`, + rule + ); +}; + +const command_insert = function (rule) { + if (rule.rulenum == null) { + rule.rulenum = 1; + } + return command_args( + `iptables -I ${rule.chain} ${rule.rulenum}`, + rule + ); +}; + +const command_append = function (rule) { + if (rule.rulenum == null) { + rule.rulenum = 1; + } + return command_args(`iptables -A ${rule.chain}`, rule); +}; + +const command = function (oldrules, newrules) { + const commands = []; + const new_chains = []; + const old_chains = oldrules + .map(function (oldrule) { return oldrule.chain; - }).filter(function(chain, i, chains) { - return ['INPUT', 'FORWARD', 'OUTPUT'].indexOf(chain) < 0 && chains.indexOf(chain) >= i; + }) + .filter(function (chain, i, chains) { + return ( + ["INPUT", "FORWARD", "OUTPUT"].indexOf(chain) < 0 && + chains.indexOf(chain) >= i + ); }); - // Create new chains - for (const newrule of newrules) { - if (['INPUT', 'FORWARD', 'OUTPUT'].indexOf(newrule.chain) < 0 && new_chains.indexOf(newrule.chain) < 0 && old_chains.indexOf(newrule.chain) < 0) { - new_chains.push(newrule.chain); - commands.push(`iptables -N ${newrule.chain}`); - } + // Create new 
chains + for (const newrule of newrules) { + if ( + ["INPUT", "FORWARD", "OUTPUT"].indexOf(newrule.chain) < 0 && + new_chains.indexOf(newrule.chain) < 0 && + old_chains.indexOf(newrule.chain) < 0 + ) { + new_chains.push(newrule.chain); + commands.push(`iptables -N ${newrule.chain}`); } - for (const newrule of newrules) { - // break if newrule.rulenum? #or newrule.command is '-A' - if (newrule.after && !newrule.rulenum) { - let rulenum = 0; - for (const oldrule of oldrules) { - if (!(oldrule.command === '-A' && oldrule.chain === newrule.chain)) { - continue; - } - rulenum++; - if (equals(newrule.after, oldrule, Object.keys(newrule.after))) { - // newrule.rulenum = rulenum + 1 - newrule.rulenum = oldrule.rulenum + 1; - } + } + for (const newrule of newrules) { + // break if newrule.rulenum? #or newrule.command is '-A' + if (newrule.after && !newrule.rulenum) { + let rulenum = 0; + for (const oldrule of oldrules) { + if (!(oldrule.command === "-A" && oldrule.chain === newrule.chain)) { + continue; } - // break - delete newrule.after; - } - if (newrule.before && !newrule.rulenum) { - let rulenum = 0; - for (const oldrule of oldrules) { - if (!(oldrule.command === '-A' && oldrule.chain === newrule.chain)) { - continue; - } - rulenum++; - if (equals(newrule.before, oldrule, Object.keys(newrule.before))) { - // newrule.rulenum = rulenum - newrule.rulenum = oldrule.rulenum; - break; - } + rulenum++; + if (_equals(newrule.after, oldrule, Object.keys(newrule.after))) { + // newrule.rulenum = rulenum + 1 + newrule.rulenum = oldrule.rulenum + 1; } - delete newrule.before; } - let create = true; - // Get add properties present in new rule - const add_properties = utils.array.intersect(iptables.add_properties, Object.keys(newrule)); + // break + delete newrule.after; + } + if (newrule.before && !newrule.rulenum) { + let rulenum = 0; for (const oldrule of oldrules) { - if (oldrule.chain !== newrule.chain) { + if (!(oldrule.command === "-A" && oldrule.chain === newrule.chain)) { continue; } - // Add properties are the same - if (equals(newrule, oldrule, add_properties)) { - create = false; - // Check if we need to update - if (!equals(newrule, oldrule, iptables.modify_properties)) { - // Remove the command - const baserule = merge(oldrule); - for (const k in baserule) { - if (iptables.commands_arguments[k]) { - baserule[k] = undefined; - } - baserule.command = undefined; - newrule.rulenum = undefined; + rulenum++; + if (_equals(newrule.before, oldrule, Object.keys(newrule.before))) { + // newrule.rulenum = rulenum + newrule.rulenum = oldrule.rulenum; + break; + } + } + delete newrule.before; + } + let create = true; + // Get add properties present in new rule + const add_properties = array.intersect( + constants.add_properties, + Object.keys(newrule) + ); + for (const oldrule of oldrules) { + if (oldrule.chain !== newrule.chain) { + continue; + } + // Add properties are the same + if (_equals(newrule, oldrule, add_properties)) { + create = false; + // Check if we need to update + if (!_equals(newrule, oldrule, constants.modify_properties)) { + // Remove the command + const baserule = merge(oldrule); + for (const k in baserule) { + if (constants.commands_arguments[k]) { + baserule[k] = undefined; } - commands.push(iptables.command_replace(merge(baserule, newrule))); + baserule.command = undefined; + newrule.rulenum = undefined; } + commands.push(command_replace(merge(baserule, newrule))); } } - // Add properties are different - if (create) { - commands.push(newrule.command === '-A' ? 
iptables.command_append(newrule) : iptables.command_insert(newrule)); + } + // Add properties are different + if (create) { + commands.push( + newrule.command === "-A" + ? command_append(newrule) + : command_insert(newrule) + ); + } + } + return commands; +}; + +const normalize = function (rules, position = true) { + const oldrules = merge(Array.isArray(rules) ? rules : [rules]); + const newrules = []; + for (const oldrule of oldrules) { + let newrule = {}; + // Search for commands and parameters + for (const key in oldrule) { + let value = oldrule[key]; + let nkey = null; + if (typeof value === "number") { + // Normalize value as string + value = oldrule[key] = `${value}`; + } + // Normalize key as shortname (eg "-k") + if (key === "chain" || key === "rulenum" || key === "command") { + // Final name, mark key as done + nkey = key; + } else if ( + key.slice(0, 2) === "--" && + constants.parameters.indexOf(key) >= 0 + ) { + // nkey = constants.parameters_inverted[k] + nkey = key; + } else if ( + key[0] !== "-" && + constants.parameters.indexOf(`--${key}`) >= 0 + ) { + // nkey = constants.parameters_inverted["--#{key}"] + nkey = `--${key}`; + // else if constants.parameters.indexOf(key) isnt -1 + } else if (constants.parameters_inverted[key]) { + nkey = constants.parameters_inverted[key]; + } + // nkey = key + // Key has changed, replace it + if (nkey) { + newrule[nkey] = value; + oldrule[key] = null; } } - return commands; - }, - normalize: function(rules, position = true) { - const oldrules = merge(Array.isArray(rules) ? rules : [rules]); - const newrules = []; - for (const oldrule of oldrules) { - let newrule = {}; - // Search for commands and parameters - for (const key in oldrule) { - let value = oldrule[key]; - let nkey = null; - if (typeof value === 'number') { - // Normalize value as string - value = oldrule[key] = `${value}`; - } - // Normalize key as shortname (eg "-k") - if (key === 'chain' || key === 'rulenum' || key === 'command') { - // Final name, mark key as done - nkey = key; - } else if (key.slice(0, 2) === '--' && iptables.parameters.indexOf(key) >= 0) { - // nkey = iptables.parameters_inverted[k] - nkey = key; - } else if (key[0] !== '-' && iptables.parameters.indexOf(`--${key}`) >= 0) { - // nkey = iptables.parameters_inverted["--#{key}"] - nkey = `--${key}`; - // else if iptables.parameters.indexOf(key) isnt -1 - } else if (iptables.parameters_inverted[key]) { - nkey = iptables.parameters_inverted[key]; - } - // nkey = key - // Key has changed, replace it - if (nkey) { - newrule[nkey] = value; + // Add prototol specific options + const protocol = newrule["--protocol"]; + if (protocol != null) { + for (const key of constants.protocols[protocol]) { + if (oldrule[key]) { + newrule[`${protocol}|${key}`] = oldrule[key]; oldrule[key] = null; + } else if (oldrule[key.slice(2)]) { + newrule[`${protocol}|${key}`] = oldrule[key.slice(2)]; + oldrule[key.slice(2)] = null; } } - // Add prototol specific options - const protocol = newrule['--protocol']; - if (protocol != null) { - for (const key of iptables.protocols[protocol]) { - if (oldrule[key]) { - newrule[`${protocol}|${key}`] = oldrule[key]; + } + for (let key in oldrule) { + const value = oldrule[key]; + if (!value) { + continue; + } + if (key === "after" || key === "before") { + newrule[key] = normalize(value, false); + continue; + } + if (key.slice(0, 2) !== "--") { + key = `--${key}`; + } + for (const mk in constants.modules) { + const mvs = constants.modules[mk]; + for (const mv of mvs) { + if (key === mv) { + 
newrule[`${mk}|${key}`] = value; oldrule[key] = null; - } else if (oldrule[key.slice(2)]) { - newrule[`${protocol}|${key}`] = oldrule[key.slice(2)]; - oldrule[key.slice(2)] = null; } } } - for (let key in oldrule) { - const value = oldrule[key]; - if (!value) { - continue; - } - if (key === 'after' || key === 'before') { - newrule[key] = iptables.normalize(value, false); - continue; - } - if (key.slice(0, 2) !== '--') { - key = `--${key}`; - } - for (const mk in iptables.modules) { - const mvs = iptables.modules[mk]; - for (const mv of mvs) { - if (key === mv) { - newrule[`${mk}|${key}`] = value; - oldrule[key] = null; - } - } - } + } + for (const key in newrule) { + let value = newrule[key]; + if (key === "command") { + continue; } - for (const key in newrule) { - let value = newrule[key]; - if (key === 'command') { - continue; - } - // Discard default log level value - if (key === '--log-level' && value === '4') { - delete newrule[key]; - continue; - } - if (key === 'comment|--comment') { - // IPTables silently remove minus signs - value = value.replace('-', ''); - } - if (['--log-prefix', 'comment|--comment'].indexOf(key) !== -1) { - value = jsesc(value, { - quotes: 'double', - wrap: true - }); - } - newrule[key] = value; + // Discard default log level value + if (key === "--log-level" && value === "4") { + delete newrule[key]; + continue; } - newrules.push(newrule); - if (position && newrule.command !== '-A') { - for (const newrule of newrules) { - if (!(newrule.after != null || newrule.before != null)) { - newrule.after = { - '-A': 'INPUT', - chain: 'INPUT', - '--jump': 'ACCEPT' - }; - } + if (key === "comment|--comment") { + // IPTables silently remove minus signs + value = value.replace("-", ""); + } + if (["--log-prefix", "comment|--comment"].indexOf(key) !== -1) { + value = jsesc(value, { + quotes: "double", + wrap: true, + }); + } + newrule[key] = value; + } + newrules.push(newrule); + if (position && newrule.command !== "-A") { + for (const newrule of newrules) { + if (!(newrule.after != null || newrule.before != null)) { + newrule.after = { + "-A": "INPUT", + chain: "INPUT", + "--jump": "ACCEPT", + }; } } } - if (Array.isArray(rules)) { - return newrules; - } else { - return newrules[0]; + } + if (Array.isArray(rules)) { + return newrules; + } else { + return newrules[0]; + } +}; + +/* +Parse the result of `iptables -S` +*/ +const parse = function (stdout) { + const rules = []; + const command_index = {}; + for (const line of string.lines(stdout)) { + if (line.length === 0) { + continue; } - }, - /* - Parse the result of `iptables -S` - */ - parse: function(stdout) { - const rules = []; - const command_index = {}; - for (const line of utils.string.lines(stdout)) { - if (line.length === 0) { - continue; - } - const rule = {}; - let i = 0; - let key = ''; - let value = ''; - let module = null; - while (i <= line.length) { - let char = line[i]; - const forceflush = i === line.length; - const newarg = (i === 0 && char === '-') || line.slice((i - 1), +i + 1 || 9e9) === ' -'; - if (newarg || forceflush) { - if (value) { - value = value.trim(); - if (key === '-m') { - module = value; - } else { - if (module) { - key = `${module}|${key}`; + const rule = {}; + let i = 0; + let key = ""; + let value = ""; + let module = null; + while (i <= line.length) { + let char = line[i]; + const forceflush = i === line.length; + const newarg = + (i === 0 && char === "-") || line.slice(i - 1, +i + 1 || 9e9) === " -"; + if (newarg || forceflush) { + if (value) { + value = value.trim(); + if (key === 
"-m") { + module = value; + } else { + if (module) { + key = `${module}|${key}`; + } + if (constants.parameters_inverted[key]) { + key = constants.parameters_inverted[key]; + } + rule[key] = value; + } + // First key is a command + if (constants.commands_arguments[key]) { + // Determine rule number + if (Array.isArray(constants.commands_arguments[key])) { + rule.command = key; + const valueSplit = value.split(" "); + for (const k in valueSplit) { + rule[constants.commands_arguments[key][k]] = valueSplit[k]; } - if (iptables.parameters_inverted[key]) { - key = iptables.parameters_inverted[key]; + if (command_index[rule.chain] == null) { + command_index[rule.chain] = 0; } - rule[key] = value; - } - // First key is a command - if (iptables.commands_arguments[key]) { - // Determine rule number - if (Array.isArray(iptables.commands_arguments[key])) { - rule.command = key; - const valueSplit = value.split(' '); - for (const k in valueSplit) { - rule[iptables.commands_arguments[key][k]] = valueSplit[k]; - } - if (command_index[rule.chain] == null) { - command_index[rule.chain] = 0; - } - if (['-P', '-N'].indexOf(key) === -1) { - rule.rulenum = ++command_index[rule.chain]; - } + if (["-P", "-N"].indexOf(key) === -1) { + rule.rulenum = ++command_index[rule.chain]; } } - key = ''; - value = ''; - if (forceflush) { - break; - } } - key += char; - while ((char = line[++i]) !== ' ') { // and line[i]? - key += char; - } - // if iptables.parameters.indexOf(key) isnt -1 - if (iptables.parameters_inverted[key]) { - module = null; + key = ""; + value = ""; + if (forceflush) { + break; } - continue; } - if (char === '"') { - while ((char = line[++i]) !== '"') { - value += char; - } - i++; - continue; + key += char; + while ((char = line[++i]) !== " ") { + // and line[i]? 
+ key += char; } - while (char + (char = line[++i]) !== ' -' && i < line.length) { - if (char === '-' && key === '--comment') { - // IPTable silently remove minus sign from comment - continue; - } + // if constants.parameters.indexOf(key) isnt -1 + if (constants.parameters_inverted[key]) { + module = null; + } + continue; + } + if (char === '"') { + while ((char = line[++i]) !== '"') { value += char; } + i++; + continue; + } + while (char + (char = line[++i]) !== " -" && i < line.length) { + if (char === "-" && key === "--comment") { + // IPTable silently remove minus sign from comment + continue; + } + value += char; } - rules.push(rule); } - return rules; + rules.push(rule); } + return rules; }; -module.exports = iptables; +export { + constants, + command_args, + command_replace, + command_insert, + command_append, + command, + normalize, + parse, +}; + +export default { + constants, + command_args: command_args, + command_replace: command_replace, + command_insert: command_insert, + command_append: command_append, + command: command, + normalize: normalize, + parse: parse, +}; diff --git a/packages/tools/package.json b/packages/tools/package.json index 1abe792dc..dc58bf9bc 100644 --- a/packages/tools/package.json +++ b/packages/tools/package.json @@ -1,5 +1,6 @@ { "name": "@nikitajs/tools", + "version": "1.0.0-alpha.3", "description": "Provides Nikita actions for various CLI tools.", "keywords": [ "nikita", @@ -13,7 +14,6 @@ "system", "task" ], - "version": "1.0.0-alpha.3", "author": "David Worms ", "bugs": { "url": "https://github.com/adaltas/node-nikita/issues" @@ -56,6 +56,12 @@ "mocha-they": "^0.1.3", "should": "^13.2.3" }, + "exports": { + "./register": "./lib/register.js", + "./utils": "./lib/utils/index.js", + "./utils/*": "./lib/utils/*.js", + "./*": "./lib/*/index.js" + }, "peerDependencies": { "@nikitajs/core": "^1.0.0-alpha.1" }, @@ -66,16 +72,17 @@ "/lib" ], "mocha": { - "throw-deprecation": true, - "require": [ - "should", - "coffeescript/register", - "@nikitajs/tools/lib/register" - ], "inline-diffs": true, - "timeout": 40000, + "loader": "../core/test/loaders/all.js", + "recursive": true, "reporter": "spec", - "recursive": true + "require": [ + "@nikitajs/service/register", + "@nikitajs/tools/register", + "should" + ], + "throw-deprecation": true, + "timeout": 40000 }, "publishConfig": { "access": "public" @@ -91,5 +98,6 @@ "test": "npm run test:local && npm run test:env", "test:env": "env/run.sh", "test:local": "mocha 'test/**/*.coffee'" - } + }, + "type": "module" } diff --git a/packages/tools/test.sample.coffee b/packages/tools/test.sample.coffee index b379f0503..aba8de652 100644 --- a/packages/tools/test.sample.coffee +++ b/packages/tools/test.sample.coffee @@ -1,5 +1,5 @@ -module.exports = +export default tags: posix: true tools_cron: false # disable_cron @@ -19,5 +19,5 @@ module.exports = # Exemple with vagrant: # ssh: # host: '127.0.0.1', port: 2222, username: 'vagrant' - # private_key_path: "#{require('os').homedir()}/.vagrant.d/insecure_private_key" + # private_key_path: "#{os.homedir()}/.vagrant.d/insecure_private_key" ] diff --git a/packages/tools/test/backup.coffee b/packages/tools/test/backup.coffee index a53d2abdd..637c8d1e9 100644 --- a/packages/tools/test/backup.coffee +++ b/packages/tools/test/backup.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 
'mocha-they' +they = mochaThey(test.config) describe 'tools.backup', -> + return unless test.tags.posix describe 'file', -> @@ -14,9 +14,12 @@ describe 'tools.backup', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @file + content: 'Hello' + target: "#{tmpdir}/a_file" {$status, filename} = await @tools.backup name: 'my_backup' - source: "#{__filename}" + source: "#{tmpdir}/a_file" target: "#{tmpdir}/backup" $status.should.be.true() @fs.assert @@ -25,7 +28,7 @@ describe 'tools.backup', -> @wait 1000 {$status, filename} = await @tools.backup name: 'my_backup' - source: "#{__filename}" + source: "#{tmpdir}/a_file" target: "#{tmpdir}/backup" $status.should.be.true() @fs.assert @@ -37,9 +40,12 @@ describe 'tools.backup', -> $ssh: ssh $tmpdir: true , ({metadata: {tmpdir}}) -> + await @file + content: 'Hello' + target: "#{tmpdir}/a_file" {$status, base_dir, name, filename, target} = await @tools.backup name: 'my_backup' - source: "#{__filename}" + source: "#{tmpdir}/a_file" target: "#{tmpdir}/backup" compress: true $status.should.be.true() diff --git a/packages/tools/test/compress.coffee b/packages/tools/test/compress.coffee index 8e760a655..495c52ca7 100644 --- a/packages/tools/test/compress.coffee +++ b/packages/tools/test/compress.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.compress', -> + return unless test.tags.posix they 'should see extension .tgz', ({ssh}) -> nikita @@ -107,7 +107,7 @@ describe 'tools.compress', -> $tmpdir: true , ({metadata: {tmpdir}}) -> @tools.compress - source: __filename - target: __filename + source: "a_file.invalid" + target: "a_file.invalid" .should.be.rejectedWith - message: 'Unsupported Extension: ".coffee"' + message: 'Unsupported Extension: ".invalid"' diff --git a/packages/tools/test/cron/add.coffee b/packages/tools/test/cron/add.coffee index 423b03a55..88c913ac1 100644 --- a/packages/tools/test/cron/add.coffee +++ b/packages/tools/test/cron/add.coffee @@ -1,13 +1,8 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_cron - -before -> - @timeout 5*60*1000 # 5mn - nikita.service 'cronie' +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) ### Note on OSX, by default, I got the message "crontab: no crontab for {user} - using an empty one" @@ -20,6 +15,11 @@ crontab -l ### describe 'tools.cron.add', -> + return unless test.tags.tools_cron + + before -> + @timeout 5*60*1000 # 5mn + nikita.service 'cronie' describe 'schema', -> diff --git a/packages/tools/test/cron/remove.coffee b/packages/tools/test/cron/remove.coffee index 683b7e464..13e57586a 100644 --- a/packages/tools/test/cron/remove.coffee +++ b/packages/tools/test/cron/remove.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_cron +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.cron.remove', -> + return unless test.tags.tools_cron describe 'schema', -> diff --git a/packages/tools/test/dconf.coffee 
b/packages/tools/test/dconf.coffee index 3d14f04b6..8693e958b 100644 --- a/packages/tools/test/dconf.coffee +++ b/packages/tools/test/dconf.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.tools_dconf +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.dconf', -> + return unless test.tags.tools_dconf # Note, dconf inside docker fail to work and print # "error: Cannot autolaunch D-Bus without X11 $DISPLAY" diff --git a/packages/tools/test/extract.coffee b/packages/tools/test/extract.coffee index 37a74408d..28f9f4770 100644 --- a/packages/tools/test/extract.coffee +++ b/packages/tools/test/extract.coffee @@ -1,11 +1,12 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) +__dirname = new URL( '.', import.meta.url).pathname describe 'tools.extract', -> + return unless test.tags.posix they 'should see extension .tgz', ({ssh}) -> # Test a non existing extracted dir @@ -88,9 +89,9 @@ describe 'tools.extract', -> $ssh: ssh , -> @tools.extract - source: __filename + source: "a_file.invalid" .should.be.rejectedWith - message: 'Unsupported extension, got ".coffee"' + message: 'Unsupported extension, got ".invalid"' they 'should pass error for missing source file', ({ssh}) -> nikita diff --git a/packages/tools/test/git.coffee b/packages/tools/test/git.coffee index cb8624998..c59005b42 100644 --- a/packages/tools/test/git.coffee +++ b/packages/tools/test/git.coffee @@ -1,11 +1,12 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.posix +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) +__dirname = new URL( '.', import.meta.url).pathname describe 'tools.git', -> + return unless test.tags.posix they 'clones repo into new dir', ({ssh}) -> nikita diff --git a/packages/tools/test/iptables.coffee b/packages/tools/test/iptables.coffee index 90a31c1e5..b293630c4 100644 --- a/packages/tools/test/iptables.coffee +++ b/packages/tools/test/iptables.coffee @@ -1,18 +1,17 @@ -nikita = require '@nikitajs/core/lib' -require '@nikitajs/service/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.tools_iptables +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.iptables', -> + return unless test.tags.tools_iptables they 'insert a rule after existing', ({ssh}) -> nikita $ssh: ssh , -> - @service + await @service name: 'iptables-services' srv_name: 'iptables' state: ['started'] diff --git a/packages/tools/test/npm/index.coffee b/packages/tools/test/npm/index.coffee index 79c5014a7..3b9f4bac8 100644 --- a/packages/tools/test/npm/index.coffee +++ b/packages/tools/test/npm/index.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_npm +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = 
mochaThey(test.config) describe 'tools.npm', -> + return unless test.tags.tools_npm describe 'schema', -> diff --git a/packages/tools/test/npm/list.coffee b/packages/tools/test/npm/list.coffee index 51dfd91c7..2477cc9ef 100644 --- a/packages/tools/test/npm/list.coffee +++ b/packages/tools/test/npm/list.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_npm +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.npm.list', -> + return unless test.tags.tools_npm they 'option `cwd`', ({ssh}) -> nikita diff --git a/packages/tools/test/npm/outdated.coffee b/packages/tools/test/npm/outdated.coffee index 55ec67ae6..2f9c6224d 100644 --- a/packages/tools/test/npm/outdated.coffee +++ b/packages/tools/test/npm/outdated.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_npm +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.npm.outdated', -> + return unless test.tags.tools_npm they 'option `cwd`', ({ssh}) -> nikita diff --git a/packages/tools/test/npm/uninstall.coffee b/packages/tools/test/npm/uninstall.coffee index 8cba12e92..1260ec7f1 100644 --- a/packages/tools/test/npm/uninstall.coffee +++ b/packages/tools/test/npm/uninstall.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_npm +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.npm.uninstall', -> + return unless test.tags.tools_npm describe 'schema', -> diff --git a/packages/tools/test/npm/upgrade.coffee b/packages/tools/test/npm/upgrade.coffee index 72d02fc65..5f9b55b33 100644 --- a/packages/tools/test/npm/upgrade.coffee +++ b/packages/tools/test/npm/upgrade.coffee @@ -1,11 +1,11 @@ -nikita = require '@nikitajs/core/lib' -{tags, config} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_npm +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.npm.upgrade', -> + return unless test.tags.tools_npm describe 'schema', -> diff --git a/packages/tools/test/repo.coffee b/packages/tools/test/repo.coffee index 1553fadca..bcad86544 100644 --- a/packages/tools/test/repo.coffee +++ b/packages/tools/test/repo.coffee @@ -1,14 +1,14 @@ -path = require 'path' -nikita = require '@nikitajs/core/lib' -{tags, config} = require './test' -they = require('mocha-they')(config) - -return unless tags.tools_repo +import path from 'node:path' +import nikita from '@nikitajs/core' +import test from './test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.repo', -> + return unless test.tags.tools_repo - @timeout 200000 + @timeout 400000 they 'Write with source option', ({ssh}) -> nikita @@ -99,140 +99,152 @@ describe 'tools.repo', -> target: "#{tmpdir}/target/test.repo" not: true - they 'Download GPG Keys option', ({ssh, sudo}) -> + they 'Download GPG Keys from local source', ({ssh, sudo}) -> nikita $tmpdir: true , ({metadata: {tmpdir}}) -> - source = 
"#{tmpdir}/linuxtech.repo" await @file $templated: true - target: source + target: "#{tmpdir}/chrome.repo" content: """ - [linuxtech-release] - name=LinuxTECH.NET el6 main repo - baseurl=http://linuxsoft.cern.ch/linuxtech/el6/release/ - mirrorlist=http://pkgrepo.linuxtech.net/el6/release/mirrorlist.txt - mirrorlist_expire=7d - enabled=1 + [google-chrome] + name=google-chrome + baseurl=https://dl.google.com/linux/chrome/rpm/stable/x86_64 + skip_if_unavailable=True gpgcheck=1 - gpgkey=http://pkgrepo.linuxtech.net/el6/release/RPM-GPG-KEY-LinuxTECH.NET + gpgkey=https://dl.google.com/linux/linux_signing_key.pub + enabled=1 """ - await nikita - $ssh: ssh - $tmpdir: true + # Note, option `verify` is enabled by default + # Validate status changed + { $status } = await @tools.repo $sudo: sudo - , ({metadata: {tmpdir}}) -> - await @tools.repo - local: true - source: "#{source}" - gpg_dir: "#{tmpdir}" - update: false - await @fs.assert "#{tmpdir}/RPM-GPG-KEY-LinuxTECH.NET" + local: true + source: "#{tmpdir}/chrome.repo" + gpg_dir: "#{tmpdir}" + update: false + $status.should.be.true() + # Validate status unchanged + { $status } = await @tools.repo + $sudo: sudo + local: true + source: "#{tmpdir}/chrome.repo" + gpg_dir: "#{tmpdir}" + update: false + $status.should.be.false() + # Ensure the GPG key is downloaded + await @fs.assert "#{tmpdir}/linux_signing_key.pub" they 'Download repo from remote location', ({ssh, sudo}) -> nikita $ssh: ssh $sudo: sudo , -> - await @fs.remove '/etc/yum.repos.d/linuxtech.repo' + await @fs.remove '/etc/yum.repos.d/gh-cli.repo' {$status} = await @tools.repo - source: "http://pkgrepo.linuxtech.net/el6/release/linuxtech.repo" + source: "https://cli.github.com/packages/rpm/gh-cli.repo" $status.should.be.true() {$status} = await @tools.repo - source: "http://pkgrepo.linuxtech.net/el6/release/linuxtech.repo" + source: "https://cli.github.com/packages/rpm/gh-cli.repo" $status.should.be.false() - await @fs.assert '/etc/yum.repos.d/linuxtech.repo' + await @fs.assert '/etc/yum.repos.d/gh-cli.repo' they 'config `update` is `false` (default)', ({ssh, sudo}) -> + # See https://linux.die.net/man/5/yum.conf for a list of supported variables nikita $ssh: ssh $sudo: sudo , -> - await @fs.remove '/etc/yum.repos.d/mongodb.repo' - await @service.remove 'mongodb-org-server' + await @fs.remove '/etc/yum.repos.d/mariadb.repo' + await @fs.remove '/etc/pki/rpm-gpg/RPM-GPG-KEY-MariaDB' + await @service.remove 'MariaDB-client' {$status} = await @tools.repo - target: '/etc/yum.repos.d/mongodb.repo' + target: '/etc/yum.repos.d/mariadb.repo' content: - 'mongodb-org-6.0': - 'name':'MongoDB Repository' - 'baseurl':'https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/6.0/x86_64/' - 'gpgcheck':'1' + 'mariadb': + 'name': 'MariaDB' + 'baseurl': "https://yum.mariadb.org/11.0/#{test.mariadb.distrib}-#{test.mariadb.basearch}" 'enabled':'1' - 'gpgkey':'https://pgp.mongodb.com/server-6.0.asc' + 'module_hotfixes': '1' + 'gpgkey': 'https://yum.mariadb.org/RPM-GPG-KEY-MariaDB' + 'gpgcheck': '1' await @service.install - name: 'mongodb-org-server' + name: 'MariaDB-client' await @execute - command: "mongod --version | grep 'db version' | awk '{print $3}' | grep 'v6.0.9'" + command: "mariadb --version | grep '11.0.4-MariaDB'" {$status} = await @tools.repo - target: '/etc/yum.repos.d/mongodb.repo' + target: '/etc/yum.repos.d/mariadb.repo' content: - 'mongodb-org-7.0': - 'name':'MongoDB Repository' - 'baseurl':'https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/7.0/x86_64/' - 'gpgcheck':'1' + 'mariadb': 
+ 'name': 'MariaDB' + 'baseurl': "https://yum.mariadb.org/11.3/#{test.mariadb.distrib}-#{test.mariadb.basearch}" 'enabled':'1' - 'gpgkey':'https://pgp.mongodb.com/server-7.0.asc' + 'module_hotfixes': '1' + 'gpgkey': 'https://yum.mariadb.org/RPM-GPG-KEY-MariaDB' + 'gpgcheck': '1' $status.should.be.true() {$status} = await @tools.repo - target: '/etc/yum.repos.d/mongodb.repo' + target: '/etc/yum.repos.d/mariadb.repo' content: - 'mongodb-org-7.0': - 'name':'MongoDB Repository' - 'baseurl':'https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/7.0/x86_64/' - 'gpgcheck':'1' + 'mariadb': + 'name': 'MariaDB' + 'baseurl': "https://yum.mariadb.org/11.3/#{test.mariadb.distrib}-#{test.mariadb.basearch}" 'enabled':'1' - 'gpgkey':'https://pgp.mongodb.com/server-7.0.asc' + 'module_hotfixes': '1' + 'gpgkey': 'https://yum.mariadb.org/RPM-GPG-KEY-MariaDB' + 'gpgcheck': '1' $status.should.be.false() await @execute - command: "mongod --version | grep 'db version' | awk '{print $3}' | grep 'v6.0.9'" + command: "mariadb --version | grep '11.0.4-MariaDB'" they 'config `update` is `true`', ({ssh, sudo}) -> - return if ssh nikita $ssh: ssh $sudo: sudo , -> - await @fs.remove '/etc/yum.repos.d/mongodb.repo' - await @fs.remove '/etc/pki/rpm-gpg/server-6.0.asc' - await @fs.remove '/etc/pki/rpm-gpg/server-7.0.asc' - await @service.remove 'mongodb-org-server' + await @fs.remove '/etc/yum.repos.d/mariadb.repo' + await @fs.remove '/etc/pki/rpm-gpg/RPM-GPG-KEY-MariaDB' + await @service.remove 'MariaDB-client' await @tools.repo - target: '/etc/yum.repos.d/mongodb.repo' + target: '/etc/yum.repos.d/mariadb.repo' content: - 'mongodb-org-6': - 'name':'MongoDB Repository' - 'baseurl':'https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/6.0/x86_64/' - 'gpgcheck':'1' + 'mariadb': + 'name': 'MariaDB' + 'baseurl': "https://yum.mariadb.org/11.0/#{test.mariadb.distrib}-#{test.mariadb.basearch}" 'enabled':'1' - 'gpgkey':'https://pgp.mongodb.com/server-6.0.asc' + 'module_hotfixes': '1' + 'gpgkey': 'https://yum.mariadb.org/RPM-GPG-KEY-MariaDB' + 'gpgcheck': '1' await @service.install - name: 'mongodb-org-server' + name: 'MariaDB-client' await @execute - command: "mongod --version | grep 'db version' | awk '{print $3}' | grep 'v6.0.9'" + command: "mariadb --version | grep '11.0.4-MariaDB'" {$status} = await @tools.repo - target: '/etc/yum.repos.d/mongodb.repo' + target: '/etc/yum.repos.d/mariadb.repo' update: true content: - 'mongodb-org-7': - 'name':'MongoDB Repository' - 'baseurl':'https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/7.0/x86_64/' - 'gpgcheck':'1' + 'mariadb': + 'name': 'MariaDB' + 'baseurl': "https://yum.mariadb.org/11.3/#{test.mariadb.distrib}-#{test.mariadb.basearch}" 'enabled':'1' - 'gpgkey':'https://pgp.mongodb.com/server-7.0.asc' + 'module_hotfixes': '1' + 'gpgkey': 'https://yum.mariadb.org/RPM-GPG-KEY-MariaDB' + 'gpgcheck': '1' $status.should.be.true() {$status} = await @tools.repo - target: '/etc/yum.repos.d/mongodb.repo' + target: '/etc/yum.repos.d/mariadb.repo' update: true content: - 'mongodb-org-7': - 'name':'MongoDB Repository' - 'baseurl':'https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/7.0/x86_64/' - 'gpgcheck':'1' + 'mariadb': + 'name': 'MariaDB' + 'baseurl': "https://yum.mariadb.org/11.3/#{test.mariadb.distrib}-#{test.mariadb.basearch}" 'enabled':'1' - 'gpgkey':'https://pgp.mongodb.com/server-7.0.asc' + 'module_hotfixes': '1' + 'gpgkey': 'https://yum.mariadb.org/RPM-GPG-KEY-MariaDB' + 'gpgcheck': '1' $status.should.be.false() await @execute - command: "mongod --version 
| grep 'db version' | awk '{print $3}' | grep 'v7.0.1'" + command: "mariadb --version | grep '11.3.1-MariaDB'" they 'Download config `gpg_key` fails because `gpg_key` unset and not in .repo', ({ssh, sudo}) -> nikita diff --git a/packages/tools/test/resources/a_file b/packages/tools/test/resources/a_file deleted file mode 100644 index 76fc65925..000000000 --- a/packages/tools/test/resources/a_file +++ /dev/null @@ -1 +0,0 @@ -a content \ No newline at end of file diff --git a/packages/tools/test/resources/module_async.coffee b/packages/tools/test/resources/module_async.coffee deleted file mode 100644 index cd12a7b46..000000000 --- a/packages/tools/test/resources/module_async.coffee +++ /dev/null @@ -1,5 +0,0 @@ - -module.exports = ({options}, callback) -> - setImmediate => - @log "Hello #{options.who or 'world'}" - callback null, true diff --git a/packages/tools/test/resources/module_async_object.coffee b/packages/tools/test/resources/module_async_object.coffee deleted file mode 100644 index 7b63689c5..000000000 --- a/packages/tools/test/resources/module_async_object.coffee +++ /dev/null @@ -1,5 +0,0 @@ - -module.exports = who: 'me', author: 'me', handler: ({options}, callback) -> - setImmediate => - @log "Hello #{options.who or 'world'}" - callback null, true diff --git a/packages/tools/test/resources/module_sync.coffee b/packages/tools/test/resources/module_sync.coffee deleted file mode 100644 index b62b76017..000000000 --- a/packages/tools/test/resources/module_sync.coffee +++ /dev/null @@ -1,3 +0,0 @@ - -module.exports = ({options}) -> - @log "Hello #{options.who or 'world'}" diff --git a/packages/tools/test/resources/render.eco b/packages/tools/test/resources/render.eco deleted file mode 100644 index 000ee920d..000000000 --- a/packages/tools/test/resources/render.eco +++ /dev/null @@ -1 +0,0 @@ -Hello <%- @who %> \ No newline at end of file diff --git a/packages/tools/test/rubygems/fetch.coffee b/packages/tools/test/rubygems/fetch.coffee index 4e5ec1d4c..3fc8a0e9b 100644 --- a/packages/tools/test/rubygems/fetch.coffee +++ b/packages/tools/test/rubygems/fetch.coffee @@ -1,16 +1,16 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ruby} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_rubygems +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.rubygems.fetch', -> + return unless test.tags.tools_rubygems they 'with a version', ({ssh}) -> nikita $ssh: ssh - ruby: ruby + ruby: test.ruby $tmpdir: true , ({metadata: {tmpdir}}) -> {$status, filename, filepath} = await @tools.rubygems.fetch @@ -26,7 +26,7 @@ describe 'tools.rubygems.fetch', -> they 'without a version', ({ssh}) -> nikita $ssh: ssh - ruby: ruby + ruby: test.ruby $tmpdir: true , ({metadata: {tmpdir}}) -> {$status, filename, filepath} = await @tools.rubygems.fetch diff --git a/packages/tools/test/rubygems/install.coffee b/packages/tools/test/rubygems/install.coffee index 44dc54fd3..5b6386ccf 100644 --- a/packages/tools/test/rubygems/install.coffee +++ b/packages/tools/test/rubygems/install.coffee @@ -1,16 +1,16 @@ -nikita = require '@nikitajs/core/lib' -{tags, config, ruby} = require '../test' -they = require('mocha-they')(config) - -return unless tags.tools_rubygems +import nikita from '@nikitajs/core' +import test from '../test.coffee' +import mochaThey from 'mocha-they' +they = mochaThey(test.config) describe 'tools.rubygems.install', -> + return unless test.tags.tools_rubygems they 
diff --git a/packages/tools/test/rubygems/install.coffee b/packages/tools/test/rubygems/install.coffee
index 44dc54fd3..5b6386ccf 100644
--- a/packages/tools/test/rubygems/install.coffee
+++ b/packages/tools/test/rubygems/install.coffee
@@ -1,16 +1,16 @@
-nikita = require '@nikitajs/core/lib'
-{tags, config, ruby} = require '../test'
-they = require('mocha-they')(config)
-
-return unless tags.tools_rubygems
+import nikita from '@nikitajs/core'
+import test from '../test.coffee'
+import mochaThey from 'mocha-they'
+they = mochaThey(test.config)
 
 describe 'tools.rubygems.install', ->
+  return unless test.tags.tools_rubygems
 
   they 'install a non existing package', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
     , ->
       await @tools.rubygems.remove
         name: 'execjs'
@@ -23,7 +23,7 @@ describe 'tools.rubygems.install', ->
   they 'bypass existing package', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
     , ->
       await @tools.rubygems.remove
         name: 'execjs'
@@ -38,7 +38,7 @@ describe 'tools.rubygems.install', ->
   they 'install multiple versions', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
     , ->
       await @tools.rubygems.remove
         name: 'execjs'
@@ -63,7 +63,7 @@ describe 'tools.rubygems.install', ->
   they 'local gem from file', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
       $tmpdir: true
     , ({metadata: {tmpdir}}) ->
       await @tools.rubygems.remove
@@ -86,7 +86,7 @@ describe 'tools.rubygems.install', ->
   they 'local gem from glob', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
       $tmpdir: true
     , ({metadata: {tmpdir}}) ->
       await @tools.rubygems.remove
diff --git a/packages/tools/test/rubygems/remove.coffee b/packages/tools/test/rubygems/remove.coffee
index 6f905c57a..b6e4434e5 100644
--- a/packages/tools/test/rubygems/remove.coffee
+++ b/packages/tools/test/rubygems/remove.coffee
@@ -1,16 +1,16 @@
-nikita = require '@nikitajs/core/lib'
-{tags, config, ruby} = require '../test'
-they = require('mocha-they')(config)
-
-return unless tags.tools_rubygems
+import nikita from '@nikitajs/core'
+import test from '../test.coffee'
+import mochaThey from 'mocha-they'
+they = mochaThey(test.config)
 
 describe 'tools.rubygems.remove', ->
+  return unless test.tags.tools_rubygems
 
   they 'remove an existing package', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
     , ->
       await @tools.rubygems.install
         name: 'execjs'
@@ -21,7 +21,7 @@ describe 'tools.rubygems.remove', ->
   they 'remove a non existing package', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
     , ->
       await @tools.rubygems.install
         name: 'execjs'
@@ -34,7 +34,7 @@ describe 'tools.rubygems.remove', ->
   they 'remove multiple versions', ({ssh}) ->
     nikita
       $ssh: ssh
-      ruby: ruby
+      ruby: test.ruby
     , ->
       await @tools.rubygems.install
         name: 'execjs'
diff --git a/packages/tools/test/ssh/keygen.coffee b/packages/tools/test/ssh/keygen.coffee
index a04caed44..873a28d80 100644
--- a/packages/tools/test/ssh/keygen.coffee
+++ b/packages/tools/test/ssh/keygen.coffee
@@ -1,11 +1,11 @@
-nikita = require '@nikitajs/core/lib'
-{tags, config} = require '../test'
-they = require('mocha-they')(config)
-
-return unless tags.posix
+import nikita from '@nikitajs/core'
+import test from '../test.coffee'
+import mochaThey from 'mocha-they'
+they = mochaThey(test.config)
 
 describe 'tools.ssh.keygen', ->
+  return unless test.tags.posix
 
   they 'a new key', ({ssh}) ->
     nikita
diff --git a/packages/tools/test/sysctl.coffee b/packages/tools/test/sysctl.coffee
index af72a81a8..690b4f5a5 100644
--- a/packages/tools/test/sysctl.coffee
+++ b/packages/tools/test/sysctl.coffee
@@ -1,11 +1,11 @@
-nikita = require '@nikitajs/core/lib'
-{tags, config} = require './test'
-they = require('mocha-they')(config)
-
-return unless tags.posix
+import nikita from '@nikitajs/core'
+import test from './test.coffee'
+import mochaThey from 'mocha-they'
+they = mochaThey(test.config)
 
 describe 'tools.sysctl', ->
+  return unless test.tags.posix
 
   they 'Write properties', ({ssh}) ->
     nikita
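The test.coffee rewrite below swaps each CommonJS primitive for its ESM counterpart: __dirname is rebuilt from import.meta.url, the synchronous fs.existsSync checks become an fs.access-based exists helper, the configuration module is loaded with a dynamic import(), and module.exports becomes export default. A standalone sketch of that bootstrap in plain JavaScript, condensed to a single candidate file, with .js stand-ins for the project's .coffee files, and assuming a Node version with top-level await and fs.constants exposed on node:fs/promises:

// Sketch of the ESM test bootstrap; '../test.js' is assumed here because a
// plain dynamic import cannot resolve the project's .coffee module without
// the CoffeeScript loader used by the test suite.
import fs from 'node:fs/promises'

// Equivalent of the old __dirname, derived from the module URL.
const dirname = new URL('.', import.meta.url).pathname

// fs/promises has no existsSync; probe the path with access() instead.
const exists = async (path) => {
  try {
    await fs.access(path, fs.constants.F_OK)
    return true
  } catch {
    return false
  }
}

// Seed a default configuration from the sample on the first run.
if (!process.env.NIKITA_TEST_MODULE && !(await exists(`${dirname}/../test.js`))) {
  const sample = await fs.readFile(`${dirname}/../test.sample.js`)
  await fs.writeFile(`${dirname}/../test.js`, sample)
}

// require() becomes a dynamic import; the settings sit on the default export.
const config = await import(process.env.NIKITA_TEST_MODULE || '../test.js')
export default config.default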
diff --git a/packages/tools/test/test.coffee b/packages/tools/test/test.coffee
index f17f2b1bb..ccc602447 100644
--- a/packages/tools/test/test.coffee
+++ b/packages/tools/test/test.coffee
@@ -1,14 +1,24 @@
-fs = require 'fs'
+import fs from 'node:fs/promises'
+import * as url from 'node:url'
+dirname = new URL( '.', import.meta.url).pathname
+
+exists = (path) ->
+  try
+    await fs.access path, fs.constants.F_OK
+    true
+  catch
+    false
+
 
 # Write default configuration
 if not process.env['NIKITA_TEST_MODULE'] and (
-  not fs.existsSync("#{__dirname}/../test.js") and
-  not fs.existsSync("#{__dirname}/../test.json") and
-  not fs.existsSync("#{__dirname}/../test.coffee")
+  not await exists("#{dirname}/../test.js") and
+  not await exists("#{dirname}/../test.json") and
+  not await exists("#{dirname}/../test.coffee")
 )
-  config = fs.readFileSync "#{__dirname}/../test.sample.coffee"
-  fs.writeFileSync "#{__dirname}/../test.coffee", config
+  config = await fs.readFile "#{dirname}/../test.sample.coffee"
+  await fs.writeFile "#{dirname}/../test.coffee", config
 # Read configuration
-config = require process.env['NIKITA_TEST_MODULE'] or "../test.coffee"
+config = await import(process.env['NIKITA_TEST_MODULE'] or "../test.coffee")
 # Export configuration
-module.exports = config
+export default config.default
diff --git a/packages/tools/test/utils/iptables.coffee b/packages/tools/test/utils/iptables.coffee
index ec8a098bf..28c679225 100644
--- a/packages/tools/test/utils/iptables.coffee
+++ b/packages/tools/test/utils/iptables.coffee
@@ -1,10 +1,9 @@
-iptables = require '../../lib/utils/iptables'
-{tags} = require '../test'
-
-return unless tags.tools_iptables
+import iptables from '@nikitajs/tools/utils/iptables'
+import test from '../test.coffee'
 
 describe 'utils.iptables', ->
+  return unless test.tags.api
 
   describe 'normalize', ->