diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index bd82ac00..856922c0 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -42,8 +42,8 @@ jobs: - name: Configure environment run: | - neuro config login-with-token ${{ secrets.CLIENT_TEST_E2E_USER_NAME }} https://dev.neu.ro/api/v1 - neuro --color=no config show + apolo config login-with-token ${{ secrets.CLIENT_TEST_E2E_USER_NAME }} https://dev.neu.ro/api/v1 + apolo --color=no config show - uses: webfactory/ssh-agent@v0.7.0 # Authenticates ssh on worker to pull repos under github.com/neuro-actions via ssh @@ -58,6 +58,6 @@ jobs: - name: Run tests env: - NEURO_USER: ${{ secrets.CLIENT_TEST_E2E_USER_NAME }} + APOLO_USER: ${{ secrets.CLIENT_TEST_E2E_USER_NAME }} run: | make test diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9616e51c..ee119099 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,24 +1,24 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.4.0' + rev: 'v4.6.0' hooks: - id: check-merge-conflict - repo: https://github.com/asottile/yesqa - rev: v1.4.0 + rev: v1.5.0 hooks: - id: yesqa - repo: https://github.com/PyCQA/isort - rev: '5.12.0' + rev: '5.13.2' hooks: - id: isort - repo: https://github.com/psf/black - rev: '23.3.0' + rev: '24.8.0' hooks: - id: black language_version: python3 # Should be a command that runs python3.6+ args: ['--line-length', '88'] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.4.0' + rev: 'v4.6.0' hooks: - id: check-case-conflict - id: check-yaml @@ -32,17 +32,17 @@ repos: - id: debug-statements # Another entry is required to apply file-contents-sorter to another file - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.4.0' + rev: 'v4.6.0' hooks: - id: file-contents-sorter files: | .gitignore - repo: https://github.com/asottile/pyupgrade - rev: 'v3.3.2' + rev: 'v3.17.0' hooks: - id: pyupgrade args: ['--py36-plus'] - repo: 
https://github.com/PyCQA/flake8 - rev: '6.0.0' + rev: '7.1.1' hooks: - id: flake8 diff --git a/LICENSE b/LICENSE index e9e46a96..eb020293 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2019 Neuromation Ltd. + Copyright 2024 Apolo Cloud Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile index 22c5fd12..95c591bd 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,12 @@ LINTER_DIRS := tests -NEURO_COMMAND=neuro --verbose --show-traceback --color=no +APOLO_COMMAND=apolo --verbose --show-traceback --color=no TMP_DIR := $(shell mktemp -d) VERSION_FILE := version.txt .PHONY: setup init setup init: pip install -r requirements/dev.txt - pipx install neuro-all + pipx install apolo-all pre-commit install .PHONY: get-version @@ -36,8 +36,8 @@ test: .PHONY: changelog-draft changelog-draft: update-version $(VERSION_FILE) - towncrier --draft --name "Neuro Platform Flow Template" --version `cat version.txt` + towncrier --draft --name "Apolo Platform Flow Template" --version `cat version.txt` .PHONY: changelog changelog: update-version $(VERSION_FILE) - towncrier --name "Neuro Platform Flow Template" --version `cat version.txt` --yes + towncrier --name "Apolo Platform Flow Template" --version `cat version.txt` --yes diff --git a/README.md b/README.md index 8d0e41ca..2d86c35e 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ -Neu.ro Platform Project Template +Apolo Platform Flow Template =============================== -Base template for the [Neu.ro](https://neu.ro) platform projects based on [cookiecutter project](https://github.com/cookiecutter/cookiecutter). +Base template for the [apolo.us](https://apolo.us) platform projects based on [cookiecutter template](https://github.com/cookiecutter/cookiecutter). 
Usage ----- diff --git a/RELEASE.md b/RELEASE.md index 06ac855f..efa88bc1 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,7 +1,7 @@ Release instructions ==================== -Neuro CLI [uses `release` branch](https://github.com/neuro-inc/platform-client-python/blob/d00a75504d665acdbcdda24f3999ee4b2223054d/neuromation/cli/project.py#L43-L48) to scaffold projects, so to do a release we need to update `release` branch. +Apolo CLI uses `release` branch to scaffold flow configuration and structure. Instructions: @@ -16,16 +16,16 @@ Instructions: ``` $ cookiecutter gh:neuro-inc/cookiecutter-neuro-project -c master flow_name [My flow]: - flow_dir [neuro project]: - flow_id [neuro_project]: + flow_dir [my flow]: + flow_id [my_flow]: code_directory [modules]: - preserve Neuro Flow template hints [yes]: - $ cd neuro project + preserve Apolo Flow template hints [yes]: + $ cd my flow $ ls Dockerfile HELP.md README.md apt.txt config data modules notebooks requirements.txt results setup.cfg update_actions.py - $ neuro-flow build train - $ neuro-flow upload ALL - $ neuro-flow run jupyter + $ apolo-flow build train + $ apolo-flow upload ALL + $ apolo-flow run jupyter ... ``` 3. 
Generate changelog: diff --git a/cookiecutter.json b/cookiecutter.json index 09e47c93..30a864e0 100644 --- a/cookiecutter.json +++ b/cookiecutter.json @@ -4,7 +4,7 @@ "flow_dir": "{{ cookiecutter.flow_name.lower() }}", "flow_id": "{{ cookiecutter.flow_name.lower().replace(' ', '_').replace('-', '_') }}", "code_directory": "modules", - "preserve Neuro Flow template hints": "yes", + "preserve Apolo Flow template hints": "yes", "_copy_without_render": [ ".github/*" ] diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index c8b99340..068bc9c2 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -9,10 +9,10 @@ try: import asyncio - import neuro_sdk + import apolo_sdk async def get_project_name() -> str: - async with await neuro_sdk.get() as client: + async with await apolo_sdk.get() as client: return client.config.project_name_or_raise PROJECT_NAME = asyncio.run(get_project_name()) @@ -20,15 +20,16 @@ async def get_project_name() -> str: except Exception: import subprocess - if shutil.which("neuro"): + if shutil.which("apolo"): result = subprocess.run( - ["neuro", "config", "show"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ["apolo", "config", "show"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) if result.returncode == 0: cli_output = result.stdout.decode().splitlines() for line in cli_output: if "current project" in line.lower(): PROJECT_NAME = line.split()[2] + break if PROJECT_NAME: proj_file = Path("./.neuro/project.yml") content = proj_file.read_text() @@ -58,7 +59,7 @@ async def get_project_name() -> str: "false": False, } PRESERVE_HINTS_ANSWER = ( - "{{ cookiecutter['preserve Neuro Flow template hints'] | lower }}" + "{{ cookiecutter['preserve Apolo Flow template hints'] | lower }}" ) if PRESERVE_HINTS_ANSWER not in PRESERVE_HINTS_VARIANS: print( diff --git a/requirements/base.txt b/requirements/base.txt index b9a1858c..f5cfbb1f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1 +1 @@ 
-cookiecutter==2.1.1 +cookiecutter==2.6.0 diff --git a/tests/unit/test_bake_project.py b/tests/unit/test_bake_project.py index b304d947..413a0266 100644 --- a/tests/unit/test_bake_project.py +++ b/tests/unit/test_bake_project.py @@ -140,7 +140,7 @@ def test_flow_name( if venv_install_packages: venv.install_package(venv_install_packages, installer="pip") venv.run( - "neuro config login-with-token $NEURO_USER https://dev.neu.ro/api/v1" + "neuro config login-with-token $APOLO_USER https://dev.neu.ro/api/v1" ) venv.run( diff --git a/version.txt b/version.txt index df750d2d..c2448104 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -v23.07.10 +v24.9.0 diff --git a/{{cookiecutter.flow_dir}}/.github/workflows/update-flow-action-versions.yml b/{{cookiecutter.flow_dir}}/.github/workflows/update-flow-action-versions.yml new file mode 100644 index 00000000..0b8c99dd --- /dev/null +++ b/{{cookiecutter.flow_dir}}/.github/workflows/update-flow-action-versions.yml @@ -0,0 +1,33 @@ +name: Update apolo-flow action tags +on: + schedule: + - cron: 0 0 * * * +jobs: + auto-update: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install dependencies + run: pip install -U "PyGithub==1.55" + - id: run_updater + name: Run update actions script + run: python update_actions.py ".neuro/*.y*ml" + - id: generate_token + uses: tibdex/github-app-token@v1 + with: + app_id: ${{ secrets.BOT_APP_ID }} + private_key: ${{ secrets.BOT_PRIVATE_KEY }} + - name: Create Pull Request + uses: peter-evans/create-pull-request@v3.10.1 + with: + token: ${{ steps.generate_token.outputs.token }} + branch: update/actions-tags + title: Auto-update actions tags + commit-message: Auto-update apolo-flow actions tags + body: | + Updated apolo-flow action version tags in the following + files: ${{ steps.run_updater.outputs.updated_files }} diff --git 
a/{{cookiecutter.flow_dir}}/.github/workflows/update-neuro-flow-actions.yml b/{{cookiecutter.flow_dir}}/.github/workflows/update-neuro-flow-actions.yml deleted file mode 100644 index 10407a27..00000000 --- a/{{cookiecutter.flow_dir}}/.github/workflows/update-neuro-flow-actions.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: Update neuro-flow action tags -on: - schedule: - - cron: 0 0 * * * -jobs: - auto-update: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - name: Install dependencies - run: pip install -U "PyGithub==1.55" - - id: run_updater - name: Run update actions script - run: python update_actions.py ".neuro/*.y*ml" - - id: generate_token - uses: tibdex/github-app-token@v1 - with: - app_id: ${{ secrets.BOT_APP_ID }} - private_key: ${{ secrets.BOT_PRIVATE_KEY }} - - name: Create Pull Request - uses: peter-evans/create-pull-request@v3.10.1 - with: - token: ${{ steps.generate_token.outputs.token }} - branch: update/actions-tags - title: Auto-update actions tags - commit-message: Auto-update neuro-flow actions tags - body: | - Updated neuro-flow action version tags in the following - files: ${{ steps.run_updater.outputs.updated_files }} diff --git a/{{cookiecutter.flow_dir}}/.gitignore b/{{cookiecutter.flow_dir}}/.gitignore index 600ed7b9..f458bf93 100644 --- a/{{cookiecutter.flow_dir}}/.gitignore +++ b/{{cookiecutter.flow_dir}}/.gitignore @@ -1,4 +1,4 @@ -# Neuro Template: +# Apolo Flow Template: /data/**/* !/**/.gitkeep diff --git a/{{cookiecutter.flow_dir}}/.neuro/live.yml b/{{cookiecutter.flow_dir}}/.neuro/live.yml index 801aa65f..b34db990 100644 --- a/{{cookiecutter.flow_dir}}/.neuro/live.yml +++ b/{{cookiecutter.flow_dir}}/.neuro/live.yml @@ -1,7 +1,7 @@ kind: live ## Required. 
Type of workflow, might be one of the following: -## - 'live' -- full reference at https://neu-ro.gitbook.io/neuro-flow/reference/live-workflow-syntax -## - 'batch' -- full reference at https://neu-ro.gitbook.io/neuro-flow/reference/batch-workflow-syntax +## - 'live' -- full reference at https://docs.apolo.us/apolo-flow-reference/workflow-syntax/live-workflow-syntax +## - 'batch' -- full reference at https://docs.apolo.us/apolo-flow-reference/workflow-syntax/batch-workflow-syntax # id: ## Optional. Identifier of the workflow. By default, the id is 'live'. It's available as $[[ flow.flow_id ]] in experssions. ## Note: Not to be confused with $[[ flow.project_id ]], which is a different context defined in the `project.yml` file. @@ -25,7 +25,7 @@ defaults: # key2: value2 ## A mapping of environment variables that will be set in all jobs of the workflow. ## When two or more environment variables are defined with the same name, - ## `neuro-flow` uses the most specific environment variable. + ## `apolo-flow` uses the most specific environment variable. ## For example, an environment variable defined in a job will override the workflow's default. # volumes: # - storage:some/path:/path/in/job @@ -46,14 +46,14 @@ defaults: images: ## Optional section, a mapping of image definitions used by the workflow. train: - ## `neuro-flow build train` creates an image from the passed Dockerfile and uploads it to the Neu.ro Registry. + ## `apolo-flow build train` creates an image from the passed Dockerfile and uploads it to the Neu.ro Registry. ## The $[[ images.img_id.ref ]] expression can be used for pointing to an image from jobs..image. ref: image:/$[[ project.project_name ]]/$[[ flow.project_id ]]:v1 ## Required. Image reference, can be of two types: - ## - Platform-hosted image - its reference should start with the 'image:' prefix. `neuro-flow build ` will work in this case. - ## - Image hosted on DockerHub - without the 'image:' prefix. 
In this case, `neuro-flow build ` will not work. + ## - Platform-hosted image - its reference should start with the 'image:' prefix. `apolo-flow build ` will work in this case. + ## - Image hosted on DockerHub - without the 'image:' prefix. In this case, `apolo-flow build ` will not work. ## Check ./neuro/project.yaml to configure the $[[ flow.project_id ]] part. - ## During job execution, the '$[[ flow.project_id ]]' part will be replaced with its string value by the Neuro-Flow engine. + ## During job execution, the '$[[ flow.project_id ]]' part will be replaced with its string value by the Apolo-Flow engine. ## Hint: You can use the embedded `hash_files()` function to generate a built image's tag based on its content. ## Example: ## train: @@ -109,7 +109,7 @@ volumes: ## Optional. Volumes can also be assotiated with folders on a local machine. ## A local path should be relative to the project's root. ## If this parameter is specified, the volume content can be synchronized between the local machine and a storage folder (but not a disk!) - ## with the help of `neuro-flow upload` and `neuro-flow download` commands. + ## with the help of `apolo-flow upload` and `apolo-flow download` commands. # read-only: true ## The volume is mounted as read-only by default if this attribute is set, read-write mode is used otherwise. code: @@ -134,14 +134,17 @@ volumes: local: . jobs: -## A live workflow can run jobs by their identifiers ('job-id') using the `neuro-flow run ` command. +## A live workflow can run jobs by their identifiers ('job-id') using the `apolo-flow run ` command. ## Each job runs remotely on the Neu.ro Platform. ## Jobs could be defined in two different ways: ## 1. Directly in this file; -## 2. In a separate file (on a local machine or in a Git repository) and reused as an 'action'; +## 2. In a separate file (on a local machine or in a Git repository) and reused as an 'action' or 'module'. +## 3. 
Inheriting some attributes from the mixins ## Checkout full documentation at the respective pages: -## 1. https://neu-ro.gitbook.io/neuro-flow/reference/live-workflow-syntax#jobs -## 2. https://neu-ro.gitbook.io/neuro-flow/reference/actions-syntax +## 1. https://docs.apolo.us/apolo-flow-reference/workflow-syntax/live-workflow-syntax#jobs +## 2. https://docs.apolo.us/apolo-flow-reference/workflow-syntax/actions-syntax and https://docs.apolo.us/apolo-flow-reference/modules +## 3. https://docs.apolo.us/apolo-flow-reference/mixins +## 4. remote_debug: ## Each job must have an associated Job ID (a.k.a. job name) within the project. ## The key 'job-id' is a string and its value is a map of the job's configuration data or action call. @@ -154,7 +157,7 @@ jobs: ## - `workspace:` or `ws:` for action files that are stored locally ## - `github:` or `gh:` for actions that are bound to a GitHub repository ## In this particular case, we are using a GitHub repository https://github.com/neuro-actions/remote_debug under the `@1.0.0` tag. - ## To run this job, Neuro-Flow will fetch the 'action.yaml' file from the repository and execute the job defined in it. + ## To run this job, Apolo-Flow will fetch the 'action.yaml' file from the repository and execute the job defined in it. args: ## Optional action-specific mapping of values that will be passed to the actions as arguments. ## They should correspond to inputs defined in the action file. @@ -178,11 +181,11 @@ jobs: # name2: ~ # None by default # name3: "" # Empty string by default ## Optional. Params is a mapping of key-value pairs that have default values and can be overridden - ## from the command line by using `neuro-flow run --param name1 val1 --param name2 val2`. + ## from the command line by using `apolo-flow run --param name1 val1 --param name2 val2`. ## Parameters can be specified in 'short' and 'long' forms - the example above is in a short form. 
## The short form is compact, but only allows to specify the parameter's name and default value. ## The long form allows to additionally specify parameter descriptions. - ## This can be useful for `neuro-flow run` command introspection, shell autocompletion, + ## This can be useful for `apolo-flow run` command introspection, shell autocompletion, ## and generation of more detailed error messages. Example: # params: # name1: @@ -196,7 +199,7 @@ jobs: # browse: true ## Whether to open the job's HTTP URI in a browser after job startup. `False` by default. # detach: true - ## By default, 'neuro-flow run ' keeps the terminal attached to the spawned job. + ## By default, 'apolo-flow run ' keeps the terminal attached to the spawned job. ## This can help with viewing the job's logs and running commands in its embedded bash session. ## Enable the `detach` attribute to disable this behavior. # entrypoint: sh -c "echo $HOME" @@ -218,17 +221,17 @@ jobs: # name: my-job-name ## Specifies an optional job name. ## This name becomes a part of the job's internal hostname and exposed HTTP URL. - ## The job can then be controlled by its name through the low-level `neuro` tool. + ## The job can then be controlled by its name through the low-level `apolo` CLI. ## If the name is not specified in the `name` attribute, the default one will be generated as follows: ## '-[-]'. # multi: true ## By default, a job can only have one running instance at a time. - ## Calling 'neuro-flow run ' with the same job ID for a second time + ## Calling 'apolo-flow run ' with the same job ID for a second time ## will attach to the already running job instead of creating a new one. ## This can be optionally overridden by enabling the 'multi' attribute. # pass_config: true ## Attach your Neu.ro authentication data and config into the job. - ## Can be usefull if you want to use Neuro CLI inside the running job. + ## Can be useful if you want to use Apolo CLI inside the running job. 
## Note: the lifetime of passed credentials is bound to the job's lifetime. ## It will be impossible to use them when the job is terminated. # port_forward: @@ -239,7 +242,7 @@ jobs: ## You can use this feature, for instance, to access a DB running in the job for debugging. # preset: cpu-small ## A resource preset used to run the job. - ## This overwrites the system-default (first in the 'neuro config show' list) and workflow-default configurations. + ## This overwrites the system-default (first in the 'apolo config show' list) and workflow-default configurations. ## Consider selecting the resource preset separately for each job according to your needs. # schedule_timeout: 1d ## Set an optional schedule timeout to the specified value. @@ -264,9 +267,9 @@ jobs: ## You can specify a plain string for the volume reference and use $[[ volumes..ref ]] expressions. - $[[ volumes.data.ref_ro ]] - $[[ upload(volumes.code).ref_ro ]] - ## upload() - is an expression function which performs `neuro-flow upload code` before each run of this job + ## upload() - is an expression function which performs `apolo-flow upload code` before each run of this job ## Check this list of magic functions and their use-cases under - ## https://neu-ro.gitbook.io/neuro-flow/reference/expression-functions + ## https://docs.apolo.us/apolo-flow-reference/expression-functions - $[[ volumes.config.ref_ro ]] - $[[ volumes.results.ref_rw ]] # workdir: /users/my_user diff --git a/{{cookiecutter.flow_dir}}/.neuro/project.yml b/{{cookiecutter.flow_dir}}/.neuro/project.yml index e03c1f18..510188de 100644 --- a/{{cookiecutter.flow_dir}}/.neuro/project.yml +++ b/{{cookiecutter.flow_dir}}/.neuro/project.yml @@ -1,4 +1,4 @@ -## Check our full reference documentation at https://neu-ro.gitbook.io/neuro-flow/reference/project-configuration-syntax +## Check our full reference documentation at https://docs.apolo.us/apolo-flow-reference/workflow-syntax/project-configuration-syntax id: {{ cookiecutter.flow_id }} ## 
Could be referenced as $[[ project.id ]] or $[[ flow.project_id ]] contexts. ## Default value: name of the folder containing `.neuro` diff --git a/{{cookiecutter.flow_dir}}/Dockerfile b/{{cookiecutter.flow_dir}}/Dockerfile index 4d38785a..8c05424c 100644 --- a/{{cookiecutter.flow_dir}}/Dockerfile +++ b/{{cookiecutter.flow_dir}}/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/neuro-inc/base:v22.12.0-runtime +FROM ghcr.io/neuro-inc/base:v23.8.0-runtime COPY apt.txt . RUN export DEBIAN_FRONTEND=noninteractive && \ @@ -13,4 +13,4 @@ COPY setup.cfg . COPY requirements.txt . RUN pip install --progress-bar=off -U --no-cache-dir -r requirements.txt -RUN ssh-keygen -f /id_rsa -t rsa -N neuro -q +RUN ssh-keygen -f /id_rsa -t rsa -N apolo -q diff --git a/{{cookiecutter.flow_dir}}/HELP.md b/{{cookiecutter.flow_dir}}/HELP.md index d2504b06..8928b3a6 100644 --- a/{{cookiecutter.flow_dir}}/HELP.md +++ b/{{cookiecutter.flow_dir}}/HELP.md @@ -1,13 +1,13 @@ -# Neuro Project Template Reference +# Apolo Flow Template Reference ## Development environment -This template runs on the [Neuro Platform](https://neu.ro). +This template runs on the [Apolo Platform](https://apolo.us). -To dive into problem solving, you need to sign up at the [Neuro Platform](https://neu.ro) website, set up your local machine according to the [instructions](https://neu.ro/docs), and login to Neuro CLI: +To dive into problem solving, you need to sign up at the [Apolo Platform](https://apolo.us) website, set up your local machine according to the [instructions](https://docs.apolo.us/), and login to Apolo CLI: ```shell -neuro login +apolo login ``` ## Directory structure @@ -22,24 +22,24 @@ neuro login ## Development -Follow the instructions below to set up the environment on Neuro and start a Jupyter development session. +Follow the instructions below to set up the environment on the platform and start a Jupyter development session. 
### Setting up the development environment ```shell -neuro-flow build myimage +apolo-flow build myimage ``` Command results: * The `requirements.txt`, `apt.txt`, and `setup.cfg` files from the local project are uploaded to the platform storage. -* A new job is started in our [base environment](https://hub.docker.com/r/neuromation/base). +* A new job is started in our [base environment](https://github.com/neuro-inc/neuro-base-environment/pkgs/container/base). * Pip requirements from `requirements.txt` and `apt` applications from `apt.txt` are installed in the same environment. * The updated environment is saved under a new project-dependent name to be used further on. ### Running Jupyter with GPU ```shell -neuro-flow run jupyter +apolo-flow run jupyter ``` Command results: @@ -50,16 +50,16 @@ Command results: ### Killing Jupyter ```shell -neuro-flow kill jupyter +apolo-flow kill jupyter ``` Command results: -* The job with Jupyter Notebooks is terminated. The notebooks are saved on the platform storage. You may run `neuro-flow download notebooks` to download them to the local `notebooks/` directory. +* The job with Jupyter Notebooks is terminated. The notebooks are saved on the platform storage. You may run `apolo-flow download notebooks` to download them to the local `notebooks/` directory. ### Memory management -If you're not using the default `neuromation/base` base image, you may want to protect the main processes in your jobs from being killed when there's not enough memory for them. +If you're not using the default base image, you may want to protect the main processes in your jobs from being killed when there's not enough memory for them. You can do this in two steps: @@ -92,7 +92,7 @@ This will ensure the script from step 1 is executed every minute. 
### Help ```shell -neuro-flow ps +apolo-flow ps ``` Command results: @@ -104,14 +104,14 @@ Command results: ### Uploading to the Storage via Web UI -On a local machine, run `neuro-flow run filebrowser` and open the job's URL on your mobile device or desktop. +On a local machine, run `apolo-flow run filebrowser` and open the job's URL on your mobile device or desktop. Through a simple file explorer interface, you can upload test images and perform various file operations. ### Uploading to the Storage via CLI -On a local machine, run `neuro-flow mkvolumes`. This command creates storage folders for all defined volumes. You only need to run this once. +On a local machine, run `apolo-flow mkvolumes`. This command creates storage folders for all defined volumes. You only need to run this once. -After the storage folders have been created, run `neuro-flow upload data` from the a local machine as well. This command pushes local files stored in `./data` into the `storage:{{ cookiecutter.flow_id }}/data` volume mounted to your development environment's `/project/data`. +After the storage folders have been created, run `apolo-flow upload data` from a local machine as well. This command pushes local files stored in `./data` into the `storage:{{ cookiecutter.flow_id }}/data` volume mounted to your development environment's `/project/data`. You can upload (or download) every folder for which the `local` parameter is specified in the [live.yml file](./.neuro/live.yml). @@ -129,21 +129,10 @@ Feel free to refer to the [documentation](https://docs.neu.ro/toolbox/accessing- ### Running a development job -If you want to debug your code on GPU, you can run a sleeping job via `neuro-flow run remote_debug` which will also open a shell to the job. You can also see job logs via `neuro-flow logs remote_debug`. The job forwards your local port 2211 to its port 22 for remote debugging. 
+If you want to debug your code on GPU, you can run a sleeping job via `apolo-flow run remote_debug` which will also open a shell to the job. You can also see job logs via `apolo-flow logs remote_debug`. The job forwards your local port 2211 to its port 22 for remote debugging. You can find the instructions on remote debugging via PyCharm Pro in the [documentation](https://neu.ro/docs/remote_debugging_pycharm). -Please don't forget to kill your job via `neuro-flow kill remote_debug` to not waste your quota! - -### Weights & Biases integration - -The Neuro Platform offers easy integration with [Weights & Biases](https://www.wandb.com), an experiment tracking tool for deep learning. - -Here you can find [documentation](https://docs.neu.ro/toolbox/experiment-tracking-with-weights-and-biases) for using W&B for experiment tracking with the template. - -You can also refer to instructions on using Weights & Biases in your code in the [W&B documentation](https://docs.wandb.com/library/api/examples). -There are also [W&B example projects](https://github.com/wandb/examples) or an example of a Neuro Project Template-based -[ML Recipe that uses W&B as a part of the workflow](https://neu.ro/docs/cookbook/ml-recipe-hier-attention). - +Please don't forget to kill your job via `apolo-flow kill remote_debug` to not waste your quota! ### Training machine types @@ -152,7 +141,7 @@ defaults: preset: gpu-small-p ``` -There are several machine types available on the platform. Run `neuro config show` to see the full list. You can also override default presets for each job: +There are several machine types available on the platform. Run `apolo config show` to see the full list. You can also override default presets for each job: ```yaml jobs: @@ -173,7 +162,7 @@ args: ### Storage uploads -Running `neuro-flow upload ALL` from a local machine will upload all of your code, configs, and notebooks to the storage so that these folders can be used by your jobs. 
+Running `apolo-flow upload ALL` from a local machine will upload all of your code, configs, and notebooks to the storage so that these folders can be used by your jobs. ### The training command @@ -197,27 +186,21 @@ To tweak the training command, change the last line in this section of `live.yam python -u $[[ volumes.code.mount ]]/train.py --data $[[ volumes.data.mount ]] ``` -After this, just run `neuro-flow run train`. +After this, just run `apolo-flow run train`. ### Multiple training jobs You can run multiple training experiments simultaneously. ```shell -neuro-flow run multitrain -s myidea-1 +apolo-flow run multitrain -s myidea-1 ``` -Note that this label becomes a suffix of the job name which can only contain alphanumeric characters and hyphens `-`, cannot end with a hyphen, and cannot be longer than 40 characters. You can use suffixed job names to access jobs: `neuro-flow status multitrain myidea-1`, `neuro-flow logs multitrain myidea-1`, and so on. +Note that this label becomes a suffix of the job name which can only contain alphanumeric characters and hyphens `-`, cannot end with a hyphen, and cannot be longer than 40 characters. You can use suffixed job names to access jobs: `apolo-flow status multitrain myidea-1`, `apolo-flow logs multitrain myidea-1`, and so on. Please don't forget to kill the jobs you started: -- `neuro-flow kill train` to kill the training job started via `neuro-flow run train`, -- `neuro-flow kill multitrain` to kill the training job started via `neuro-flow run multitrain`, -- `neuro-flow kill jupyter` to kill the job started via `neuro-flow run jupyter`, +- `apolo-flow kill train` to kill the training job started via `apolo-flow run train`, +- `apolo-flow kill multitrain` to kill the training job started via `apolo-flow run multitrain`, +- `apolo-flow kill jupyter` to kill the job started via `apolo-flow run jupyter`, - ... -- `neuro-flow kill ALL` to kill all jobs started in the current project. 
- -### Multi-threaded hyperparameter tuning - -The Neuro Platform supports hyperparameter tuning via [Weights & Biases](https://www.wandb.com/articles/running-hyperparameter-sweeps-to-pick-the-best-model-using-w-b). - -Please refer to the corresponding [documentation](https://docs.neu.ro/toolbox/hyperparameter-tuning-with-weights-and-biases) for more information. +- `apolo-flow kill ALL` to kill all jobs started in the current project. diff --git a/{{cookiecutter.flow_dir}}/README.md b/{{cookiecutter.flow_dir}}/README.md index 85944a74..526b271b 100644 --- a/{{cookiecutter.flow_dir}}/README.md +++ b/{{cookiecutter.flow_dir}}/README.md @@ -5,16 +5,16 @@ ## Quick Start -Sign up at [neu.ro](https://neu.ro) and setup your local machine according to [instructions](https://docs.neu.ro/). +Sign up at [apolo](https://console.apolo.us) and setup your local machine according to [instructions](https://docs.apolo.us/). Then run: ```shell pip install -U pipx -pipx install neuro-all -neuro login -neuro-flow build train -neuro-flow run jupyter +pipx install apolo-all +apolo login +apolo-flow build train +apolo-flow run jupyter ``` -See [Help.md](HELP.md) for the detailed Neuro Project Template Reference. +See [Help.md](HELP.md) for the detailed flow template reference. diff --git a/{{cookiecutter.flow_dir}}/notebooks/demo.ipynb b/{{cookiecutter.flow_dir}}/notebooks/demo.ipynb index 1a628468..cda8b451 100644 --- a/{{cookiecutter.flow_dir}}/notebooks/demo.ipynb +++ b/{{cookiecutter.flow_dir}}/notebooks/demo.ipynb @@ -12,22 +12,28 @@ }, { "cell_type": "markdown", - "source": [ - "The code from '{{cookiecutter.code_directory}}' directory is mounted under /project/modules, which is set as env PYTHONPATH.\n", - "Therefore, you could import modules from this folder as is.\n", - "\n", - "For more info (other mounting paths, parameters, etc), refer to [neuro-flow jupyter action repository](https://github.com/neuro-actions/jupyter)." 
- ], "metadata": { "collapsed": false, "pycharm": { "name": "#%% md\n" } - } + }, + "source": [ + "The code from '{{cookiecutter.code_directory}}' directory is mounted under /project/modules, which is set as env PYTHONPATH.\n", + "Therefore, you could import modules from this folder as is.\n", + "\n", + "For more info (other mounting paths, parameters, etc), refer to [apolo-flow jupyter action repository](https://github.com/neuro-actions/jupyter)." + ] }, { "cell_type": "code", "execution_count": null, + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + }, "outputs": [], "source": [ "from train import train, get_parser\n", @@ -36,13 +42,7 @@ "args = arg_parser.parse_args([\"--data_dir\", \"/project/data\"])\n", "\n", "train(args)" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n" - } - } + ] } ], "metadata": { diff --git a/{{cookiecutter.flow_dir}}/update_actions.py b/{{cookiecutter.flow_dir}}/update_actions.py index f77afced..1c2129be 100644 --- a/{{cookiecutter.flow_dir}}/update_actions.py +++ b/{{cookiecutter.flow_dir}}/update_actions.py @@ -33,7 +33,7 @@ def parse_args() -> argparse.Namespace: "patterns", metavar="PATTERN", nargs="+", - help="Neuro-flow workflow file, which should be scanned for action updates.", + help="Apolo-flow workflow file, which should be scanned for action updates.", ) parser.add_argument("--token", nargs="?", help="GitHub token to use.") parser.add_argument(