From 84868a898fc1a9b1793925eb697807e8cd53d56d Mon Sep 17 00:00:00 2001 From: Yevhenii Semendiak Date: Thu, 5 Sep 2024 12:24:19 +0300 Subject: [PATCH] PRO-49: Bump apolo and other dependencies --- .github/workflows/ci.yml | 224 +++++++++++++++++----------------- .pre-commit-config.yaml | 20 +-- .python-version | 1 + CLI.md | 4 +- Dockerfile | 2 +- Makefile | 2 +- pyproject.toml | 3 + requirements/python-base.txt | 3 +- requirements/python-dev.txt | 10 +- setup.cfg | 45 +++++++ setup.py | 30 +---- tests/integration/conftest.py | 26 ++-- tests/integration/test_api.py | 4 +- wabucketref/__init__.py | 2 +- wabucketref/api.py | 48 ++++---- wabucketref/cli.py | 12 +- 16 files changed, 229 insertions(+), 207 deletions(-) create mode 100644 .python-version diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5c87c63..df66878 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,42 +20,42 @@ jobs: outputs: version: ${{ steps.version.outputs.version }} steps: - - name: Checkout commit - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Setup Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Cache Python and its deps - uses: actions/cache@v3 - with: - key: ${{ runner.os }}-python-3.11-${{ hashFiles('requirements/*.txt') }} - path: ${{ env.pythonLocation }} - - - name: Cache pre-commit hooks - uses: actions/cache@v2 - with: - path: ~/.cache/pre-commit - key: ${{ runner.os }}-pre-commit-python-3.11-${{ hashFiles('.pre-commit-config.yaml') }} - - - name: Install dependencies - run: | - python -m pip install -U pip - make setup - - - name: Run linters - run: | - echo Linting ${{ github.event.pull_request.head.sha || github.sha }} - make lint - - - name: Save the package version - id: version - run: | - echo "version=$(python setup.py --version)" >> $GITHUB_OUTPUT + - name: Checkout commit + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Setup Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Cache Python and its deps + uses: actions/cache@v3 + with: + key: ${{ runner.os }}-python-3.11-${{ hashFiles('requirements/*.txt') }} + path: ${{ env.pythonLocation }} + + - name: Cache pre-commit hooks + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-python-3.11-${{ hashFiles('.pre-commit-config.yaml') }} + + - name: Install dependencies + run: | + python -m pip install -U pip + make setup + + - name: Run linters + run: | + echo Linting ${{ github.event.pull_request.head.sha || github.sha }} + make lint + + - name: Save the package version + id: version + run: | + echo "version=$(python setup.py --version)" >> $GITHUB_OUTPUT test: if: | @@ -65,46 +65,46 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 15 steps: - - name: Checkout commit - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Setup Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Cache Python and its deps - uses: actions/cache@v3 - with: - key: ${{ runner.os }}-python-3.11-${{ hashFiles('requirements/*.txt') }} - path: ${{ env.pythonLocation }} - - - name: Cache pre-commit hooks - uses: actions/cache@v2 - with: - path: ~/.cache/pre-commit - key: ${{ runner.os }}-pre-commit-python-3.11-${{ hashFiles('.pre-commit-config.yaml') }} - - - name: Install dependencies - run: | - python -m pip install -U pip - make setup - - - 
name: Configure Neu.ro CLI - env: - NEURO_TOKEN: ${{ secrets.COOKIECUTTER_TEST_E2E_TOKEN }} - NEURO_STAGING_URL: ${{ secrets.NEURO_STAGING_URL }} - run: | - neuro config login-with-token ${{ env.NEURO_TOKEN }} ${{ env.NEURO_STAGING_URL }} - neuro config switch-cluster onprem-poc - - - name: Run tests - env: - WANDB_API_KEY: ${{ secrets.WANDB_API_KEY }} - run: | - make test + - name: Checkout commit + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Setup Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Cache Python and its deps + uses: actions/cache@v3 + with: + key: ${{ runner.os }}-python-3.11-${{ hashFiles('requirements/*.txt') }} + path: ${{ env.pythonLocation }} + + - name: Cache pre-commit hooks + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-python-3.11-${{ hashFiles('.pre-commit-config.yaml') }} + + - name: Install dependencies + run: | + python -m pip install -U pip + make setup + + - name: Configure Neu.ro CLI + env: + NEURO_TOKEN: ${{ secrets.COOKIECUTTER_TEST_E2E_TOKEN }} + NEURO_STAGING_URL: ${{ secrets.NEURO_STAGING_URL }} + run: | + apolo config login-with-token ${{ env.NEURO_TOKEN }} ${{ env.NEURO_STAGING_URL }} + apolo config switch-cluster onprem-poc + + - name: Run tests + env: + WANDB_API_KEY: ${{ secrets.WANDB_API_KEY }} + run: | + make test deploy: name: Build and push Docker image @@ -112,39 +112,39 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') steps: - - name: Checkout commit - uses: actions/checkout@v3 - - - name: Sanity check for tag and version - run: | - export VERSION=${{ needs.lint.outputs.version }} - if [ "${{ github.ref }}" != "refs/tags/v$VERSION" ] - then - echo "Git tag '${{ github.ref }}' differs from hard-coded package version 'v$VERSION'" - exit 1 - else - echo "OK, git tag matches hard-coded package version: 'v$VERSION'" - fi - - - name: Login to Github container registry - uses: docker/login-action@v2 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build - env: - VERSION: ${{ needs.lint.outputs.version }} - COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - run: | - docker build \ - -t "ghcr.io/neuro-inc/wabucketref:${VERSION}" \ - -t "ghcr.io/neuro-inc/wabucketref:latest" \ - --build-arg COMMIT_SHA=${COMMIT_SHA} \ - . - - name: Push - env: - VERSION: ${{ needs.lint.outputs.version }} - run: | - docker push ghcr.io/neuro-inc/wabucketref --all-tags + - name: Checkout commit + uses: actions/checkout@v3 + + - name: Sanity check for tag and version + run: | + export VERSION=${{ needs.lint.outputs.version }} + if [ "${{ github.ref }}" != "refs/tags/v$VERSION" ] + then + echo "Git tag '${{ github.ref }}' differs from hard-coded package version 'v$VERSION'" + exit 1 + else + echo "OK, git tag matches hard-coded package version: 'v$VERSION'" + fi + + - name: Login to Github container registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build + env: + VERSION: ${{ needs.lint.outputs.version }} + COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }} + run: | + docker build \ + -t "ghcr.io/neuro-inc/wabucketref:${VERSION}" \ + -t "ghcr.io/neuro-inc/wabucketref:latest" \ + --build-arg COMMIT_SHA=${COMMIT_SHA} \ + . 
+ - name: Push + env: + VERSION: ${{ needs.lint.outputs.version }} + run: | + docker push ghcr.io/neuro-inc/wabucketref --all-tags diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index de6a413..b9c29e7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,23 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.4.0' + rev: 'v4.6.0' hooks: - id: check-merge-conflict exclude: "rst$" - repo: https://github.com/asottile/yesqa - rev: v1.4.0 + rev: v1.5.0 hooks: - id: yesqa - repo: https://github.com/PyCQA/isort - rev: '5.12.0' + rev: '5.13.2' hooks: - id: isort - repo: https://github.com/psf/black - rev: '23.1.0' + rev: '24.8.0' hooks: - id: black - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.4.0' + rev: 'v4.6.0' hooks: - id: check-case-conflict - id: debug-statements @@ -28,24 +28,24 @@ repos: - id: check-symlinks # Another entry is required to apply file-contents-sorter to another file - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.4.0' + rev: 'v4.6.0' hooks: - id: file-contents-sorter files: | .gitignore - repo: https://github.com/asottile/pyupgrade - rev: 'v3.3.1' + rev: 'v3.17.0' hooks: - id: pyupgrade args: ['--py37-plus'] - repo: https://github.com/pycqa/flake8 - rev: '6.0.0' + rev: '7.1.1' hooks: - id: flake8 args: - "--max-line-length=88" - repo: https://github.com/rhysd/actionlint - rev: v1.6.23 + rev: v1.7.1 hooks: - id: actionlint-docker args: @@ -58,7 +58,7 @@ repos: - -ignore - 'SC2193:' - repo: https://github.com/sirosen/check-jsonschema - rev: 0.21.0 + rev: 0.29.2 hooks: - id: check-github-actions ci: diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..8e34c81 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9.19 diff --git a/CLI.md b/CLI.md index 51a5bba..a68ea08 100644 --- a/CLI.md +++ b/CLI.md @@ -66,7 +66,7 @@ wabucket link [OPTIONS] BUCKET_PATH | _-t, --type TEXT_ | W&B artifact type to assign \[required\] | | _-a, --alias TEXT_ | W&B artifact alias to assign. If not set, the artifact alias will autogenerated in form of UUID \(default behaviour\). WaBucket could also use a SHA value computed of run's arguments if this option is set to `'!run-config-hash'`. This could be used to prevent artifacts overwrites. | | _-m, --metadata KEY=VALUE_ | Metainfo, which will be pinned to the artifact after upload. | -| _-s, --suffix TEXT_ | Suffix to append to the output names `artifact\_type`, `artifact\_name` and `artifact\_alias`, which are read by the Neuro-Flow. This is usefull if you need to upload several artifacts from within a single job. | +| _-s, --suffix TEXT_ | Suffix to append to the output names `artifact\_type`, `artifact\_name` and `artifact\_alias`, which are read by the Apolo-Flow. This is usefull if you need to upload several artifacts from within a single job. | | _--help_ | Show this message and exit. | ### wabucket upload @@ -88,5 +88,5 @@ wabucket upload [OPTIONS] SRC_DIR | _-a, --alias TEXT_ | W&B artifact alias to assign. If not set, the artifact alias will autogenerated in form of UUID \(default behaviour\). WaBucket could also use a SHA value computed of run's arguments if this option is set to `'!run-config-hash'`. This could be used to prevent artifacts overwrites. | | _-m, --metadata KEY=VALUE_ | Metainfo, which will be pinned to the artifact after upload. | | _--reff / --no-reff_ | Whether to upload artifact to bucket and use it as reference in W&B, or directly upload the folder to W&B servers. 
| -| _-s, --suffix TEXT_ | Suffix to append to the output names `artifact\_type`, `artifact\_name` and `artifact\_alias`, which are read by the Neuro-Flow. This is usefull if you need to upload several artifacts from within a single job. | +| _-s, --suffix TEXT_ | Suffix to append to the output names `artifact\_type`, `artifact\_name` and `artifact\_alias`, which are read by the Apolo-Flow. This is usefull if you need to upload several artifacts from within a single job. | | _--help_ | Show this message and exit. | diff --git a/Dockerfile b/Dockerfile index 2ab53b5..5e7b668 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM ghcr.io/neuro-inc/neuro-extras:23.7.0 +FROM ghcr.io/neuro-inc/apolo-extras:24.8.0 LABEL org.opencontainers.image.source = "https://github.com/neuro-inc/mlops-wandb-bucket-ref" diff --git a/Makefile b/Makefile index 20e2c77..b92d2be 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,7 @@ format: .PHONY: image image: git push - neuro-extras image build . image:wabucketref:$(CURRENT_COMMIT) --build-arg COMMIT_SHA=$(CURRENT_COMMIT) + apolo-extras image build . image:wabucketref:$(CURRENT_COMMIT) --build-arg COMMIT_SHA=$(CURRENT_COMMIT) .PHONY: test test: diff --git a/pyproject.toml b/pyproject.toml index 41b3bed..a12ad14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,6 @@ +requires = ["setuptools>=51", "wheel>=0.36"] +build-backend = "setuptools.build_meta" + [tool.towncrier] package = "wabucketref" package_dir = "." diff --git a/requirements/python-base.txt b/requirements/python-base.txt index f6ff7e7..d6e1198 100644 --- a/requirements/python-base.txt +++ b/requirements/python-base.txt @@ -1,2 +1 @@ -neuro-cli>=23.7.0 -wandb[aws]>=0.10.33,<=0.15.5 +-e . diff --git a/requirements/python-dev.txt b/requirements/python-dev.txt index 87eb68a..4b0733b 100644 --- a/requirements/python-dev.txt +++ b/requirements/python-dev.txt @@ -1,7 +1,7 @@ -r python-base.txt -mypy==1.1.1 -pre-commit==3.1.1 -pytest==7.2.2 -pytest-asyncio==0.20.3 -towncrier==22.12.0 +mypy==1.11.2 +pre-commit==3.8.0 +pytest==8.3.2 +pytest-asyncio==0.24.0 +towncrier==24.8.0 diff --git a/setup.cfg b/setup.cfg index 9aa216d..544562a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,48 @@ +[metadata] +name = wabucketref +version = attr: wabucketref.__version__ +description = Run experiments, track artifacts via WandB, store artifacts in bucket and refer them in WandB +url = https://github.com/neuro-inc/mlops-wandb-bucket-ref +long_description = file: README.md +long_description_content_type = text/markdown +author = Apolo Team +author_email = dev@apolo.us +license = Apache 2 +classifiers = + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Operating System :: OS Independent + Development Status :: 4 - Beta + Environment :: Console + Intended Audience :: Developers + Intended Audience :: Science/Research + Intended Audience :: Information Technology + Topic :: Scientific/Engineering :: Artificial Intelligence + Topic :: Software Development + Topic :: Utilities + License :: OSI Approved :: Apache Software License + +[options] +package_dir= + =. +packages=find: +zip_safe=False +python_requires = >=3.8.0 +include_package_data = True +install_requires = + apolo-cli>=24.8.1 + wandb[aws]>=0.10.33,<=0.17.8 + +[options.packages.find] +where=. 
+ +[options.entry_points] +console_scripts = + wabucket=wabucketref.cli:main + [flake8] max-line-length = 88 exclude = diff --git a/setup.py b/setup.py index 2dd3639..056ba45 100644 --- a/setup.py +++ b/setup.py @@ -1,30 +1,4 @@ -import re +import setuptools -from setuptools import find_packages, setup - -with open("wabucketref/__init__.py") as f: - txt = f.read() - try: - version = re.findall(r'^__version__ = "([^"]+)"\r?$', txt, re.M)[0] - except IndexError: - raise RuntimeError("Unable to determine the version.") - -setup( - name="wabucketref", - version=version, - python_requires=">=3.8.1", - install_requires=[ - "neuro-cli>=23.7.0", - "wandb[aws]>=0.10.33,<=0.15.5", - ], - include_package_data=True, - description=( - "Run experiments, " - "track artifacts via WandB, " - "store artifacts in bucket and refer them in WandB" - ), - packages=find_packages(), - entry_points={"console_scripts": ["wabucket=wabucketref.cli:main"]}, - zip_safe=False, -) +setuptools.setup() diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index aedd60d..5186ca9 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -7,29 +7,29 @@ from typing import AsyncGenerator, Callable, Generator import pytest -from neuro_sdk import Bucket, Client, get +from apolo_sdk import Bucket, Client, get from yarl import URL @pytest.fixture -async def neuro_client() -> AsyncGenerator[Client, None]: +async def apolo_client() -> AsyncGenerator[Client]: async with await get() as client: yield client @pytest.fixture -async def bucket(neuro_client: Client) -> AsyncGenerator[Bucket, None]: +async def bucket(apolo_client: Client) -> AsyncGenerator[Bucket]: bucket_name = f"wabucket-test-{uuid.uuid4().hex[:10]}" - bucket = await neuro_client.buckets.create( + bucket = await apolo_client.buckets.create( name=bucket_name, ) yield bucket - await neuro_client.buckets.blob_rm(bucket.uri, recursive=True) - await neuro_client.buckets.rm(bucket_name) + await apolo_client.buckets.blob_rm(bucket.uri, recursive=True) + await apolo_client.buckets.rm(bucket_name) @pytest.fixture -def rand_artifact_dir(tmp_path: Path) -> Generator[Path, None, None]: +def rand_artifact_dir(tmp_path: Path) -> Generator[Path]: src = tmp_path / "src" src.mkdir(parents=True, exist_ok=True) (src / "somedata.csv").write_text(uuid.uuid4().hex) @@ -71,21 +71,21 @@ class BucketArtifactPath: @pytest.fixture async def bucket_artifact( - neuro_client: Client, + apolo_client: Client, bucket: Bucket, rand_artifact_dir: Path, files_hasher: RecuresiveHasher, -) -> AsyncGenerator[BucketArtifactPath, None]: +) -> AsyncGenerator[BucketArtifactPath]: bucket_name = f"wabucket-test-{uuid.uuid4().hex[:10]}" - bucket = await neuro_client.buckets.create( + bucket = await apolo_client.buckets.create( name=bucket_name, ) artifact_path = "artifact" - await neuro_client.buckets.upload_dir( + await apolo_client.buckets.upload_dir( URL(rand_artifact_dir.as_uri()), bucket.uri / artifact_path, ) bp = BucketArtifactPath(bucket, artifact_path, hash=files_hasher(rand_artifact_dir)) yield bp - await neuro_client.buckets.blob_rm(bucket.uri, recursive=True) - await neuro_client.buckets.rm(bucket_name) + await apolo_client.buckets.blob_rm(bucket.uri, recursive=True) + await apolo_client.buckets.rm(bucket_name) diff --git a/tests/integration/test_api.py b/tests/integration/test_api.py index 027a20d..9b20a54 100644 --- a/tests/integration/test_api.py +++ b/tests/integration/test_api.py @@ -4,7 +4,7 @@ from pathlib import Path import pytest -from neuro_sdk import 
Bucket +from apolo_sdk import Bucket from tests.integration.conftest import BucketArtifactPath, RecuresiveHasher from wabucketref.api import WaBucketRefAPI @@ -16,7 +16,7 @@ def test_upload_and_download( tmp_path: Path, files_hasher: RecuresiveHasher, ) -> None: - os.environ["NEURO_JOB_ID"] = "test-job" # to cover also neuro job metainfo fetch + os.environ["NEURO_JOB_ID"] = "test-job" # to cover also job metainfo fetch api = WaBucketRefAPI(bucket=bucket.name, project_name="wabucket-test") api.wandb_start_run() diff --git a/wabucketref/__init__.py b/wabucketref/__init__.py index 3bf57d6..be6ac0e 100644 --- a/wabucketref/__init__.py +++ b/wabucketref/__init__.py @@ -1,4 +1,4 @@ -__version__ = "23.7.0" +__version__ = "24.9.0" from .api import WaBucketRefAPI from .utils import parse_meta diff --git a/wabucketref/api.py b/wabucketref/api.py index 0b6d424..95c7c95 100644 --- a/wabucketref/api.py +++ b/wabucketref/api.py @@ -9,12 +9,12 @@ import time import uuid from pathlib import Path, PurePosixPath -from typing import Dict, Union +from typing import Any, Dict, Union import wandb from aiohttp import ClientError, ServerTimeoutError -from neuro_cli.asyncio_utils import Runner -from neuro_sdk import Bucket, Client, Factory +from apolo_cli.asyncio_utils import Runner +from apolo_sdk import Bucket, Client, Factory from wandb.wandb_run import Run from yarl import URL @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -RunArgsType = Union[argparse.Namespace, Dict, str] +RunArgsType = Union[argparse.Namespace, Dict[str, Any], str] DEFAULT_REF_NAME = "platform_blob" @@ -72,7 +72,7 @@ def close(self) -> None: try: # Suppress prints unhandled exceptions # on event loop closing - sys.stderr = None # type: ignore + sys.stderr = None if self._runner._started: self._runner.__exit__(*sys.exc_info()) finally: @@ -89,7 +89,7 @@ def upload_artifact( overwrite: bool = False, suffix: str | None = None, ) -> str: - self._neuro_init_if_needed() + self._apolo_init_if_needed() self._wandb_init_if_needed() artifact = wandb.Artifact(name=art_name, type=art_type, metadata=art_metadata) artifact_alias = self._get_artifact_alias(art_alias) @@ -130,8 +130,8 @@ def upload_artifact( else: logger.info(f"Uploading artifact {src_folder} as directory...") artifact.add_dir(str(src_folder)) - wandb.log_artifact(artifact, aliases=[artifact_alias]) - self._set_neuro_flow_outputs(art_name, art_type, artifact_alias, suffix) + wandb.log_artifact(artifact, aliases=[artifact_alias]) # type: ignore + self._set_apolo_flow_outputs(art_name, art_type, artifact_alias, suffix) return artifact_alias async def _dir_exists_in_bucket(self, path: str) -> bool: @@ -152,7 +152,7 @@ def wandb_start_run( w_job_type: str | None = None, run_args: RunArgsType | None = None, ) -> Run: - self._neuro_init_if_needed() + self._apolo_init_if_needed() if wandb.run is not None: raise RuntimeError(f"W&B has registerred run {wandb.run.name}") @@ -163,7 +163,7 @@ def wandb_start_run( job_type=w_job_type, settings=wandb.Settings(start_method="fork"), config=run_args, # type: ignore - tags=self._try_get_neuro_tags(), + tags=self._try_get_apolo_tags(), ) if not isinstance(wandb_run, Run): raise RuntimeError(f"Failed to initialize W&B run, got: {wandb_run:r}") @@ -185,7 +185,7 @@ def _get_artifact_alias(self, art_alias: str | None = None) -> str: cfg = wandb.run.config run_config_str = " ".join([f"{k}={v}" for k, v in sorted(cfg.items())]) alias = hashlib.sha256(run_config_str.encode("UTF-8")).hexdigest() - elif type(art_alias) 
== str: + elif isinstance(art_alias, str): alias = art_alias else: raise ValueError(f"Wrong value for artifact alias: {art_alias}.") @@ -199,9 +199,9 @@ def download_artifact( dst_folder: Path | None = None, retries: int = 5, ) -> Path: - self._neuro_init_if_needed() + self._apolo_init_if_needed() self._wandb_init_if_needed() - artifact: wandb.Artifact = wandb.use_artifact( + artifact: wandb.Artifact = wandb.use_artifact( # type: ignore artifact_or_name=f"{art_name}:{art_alias}", type=art_type ) blob_uri = self._get_artifact_ref(artifact, art_name, art_type, art_alias) @@ -225,7 +225,7 @@ def download_artifact( backoff_time = 2 ** (i + 1) - 1 logger.warning(f"Retry {i + 1}/{retries} in {backoff_time} sec.") time.sleep(backoff_time) - self._neuro_init_if_needed() + self._apolo_init_if_needed() logger.info(f"Artifact was downloaded to '{dst_folder}'") return dst_folder @@ -250,40 +250,40 @@ def _get_artifact_ref( return URL(blob_ref) - def _try_get_neuro_tags(self) -> list[str] | None: + def _try_get_apolo_tags(self) -> list[str] | None: job_id = os.environ.get("NEURO_JOB_ID") if job_id: - # assuming the neuro platform job + # assuming the platform job result = [ f"job_id:{job_id}", f"job_name:{os.environ.get('NEURO_JOB_NAME')}", f"owner:{os.environ.get('NEURO_JOB_OWNER')}", ] - result.extend(self._get_neuro_job_tags(job_id)) + result.extend(self._get_apolo_job_tags(job_id)) return result else: return None - def _get_neuro_job_tags(self, job_id: str) -> list[str]: + def _get_apolo_job_tags(self, job_id: str) -> list[str]: assert self._runner assert self._n_client job_description = self._runner.run(self._n_client.jobs.status(job_id)) return list(job_description.tags) - def _neuro_init_if_needed(self) -> None: + def _apolo_init_if_needed(self) -> None: if not self._runner._started: self._runner.__enter__() self._runner.run(self._init_client()) self._runner.run(self._init_bucket()) - def _set_neuro_flow_outputs( + def _set_apolo_flow_outputs( self, art_name: str, art_type: str, art_alias: str | None = None, suffix: str | None = None, ) -> None: - # neuro-flow reads ::set-output... if only they are at the beginning of a string + # apolo-flow reads ::set-output... if only they are at the beginning of a string suff = "_" + suffix if suffix else "" print( f"::set-output name=artifact_name{suff}::{art_name}", @@ -331,7 +331,7 @@ def link( Returns: str: artifact alias """ - self._neuro_init_if_needed() + self._apolo_init_if_needed() full_path = self.bucket.uri / bucket_path logger.info(f"Creating W&B Artifact from '{full_path}'") if not self._runner.run(self._dir_exists_in_bucket(bucket_path)): @@ -345,6 +345,6 @@ def link( uri=str(full_path), checksum=False, ) - wandb.log_artifact(artifact, aliases=[artifact_alias]) - self._set_neuro_flow_outputs(art_name, art_type, artifact_alias, suffix) + wandb.log_artifact(artifact, aliases=[artifact_alias]) # type: ignore + self._set_apolo_flow_outputs(art_name, art_type, artifact_alias, suffix) return artifact_alias diff --git a/wabucketref/cli.py b/wabucketref/cli.py index f741c45..284fb46 100644 --- a/wabucketref/cli.py +++ b/wabucketref/cli.py @@ -9,7 +9,7 @@ from . 
import WaBucketRefAPI, __version__, parse_meta -@click.group() # type: ignore[arg-type] +@click.group() @click.version_option( version=__version__, message="W&B bucket artifacts package version: %(version)s" ) @@ -83,7 +83,7 @@ def main( ctx.call_on_close(api.close) -@main.command() # type: ignore +@main.command() @click.argument("src_dir") @click.option( "-n", @@ -136,7 +136,7 @@ def main( type=str, help=( "Suffix to append to the output names `artifact_type`, " - "`artifact_name` and `artifact_alias`, which are read by the Neuro-Flow. " + "`artifact_name` and `artifact_alias`, which are read by the Apolo-Flow. " "This is usefull if you need to upload several artifacts " "from within a single job." ), @@ -174,7 +174,7 @@ def upload( ) -@main.command() # type: ignore +@main.command() @click.argument("artifact_type") @click.argument("artifact_name") @click.argument("artifact_alias") @@ -227,7 +227,7 @@ def download( ) -@main.command() # type: ignore +@main.command() @click.argument("bucket_path") @click.option( "-n", @@ -271,7 +271,7 @@ def download( type=str, help=( "Suffix to append to the output names `artifact_type`, " - "`artifact_name` and `artifact_alias`, which are read by the Neuro-Flow. " + "`artifact_name` and `artifact_alias`, which are read by the Apolo-Flow. " "This is usefull if you need to upload several artifacts " "from within a single job." ),
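
Editor's note — a minimal usage sketch of the package after this bump, for orientation only; it is not part of the patch. The constructor keywords and wandb_start_run() call mirror tests/integration/test_api.py in this diff; the upload_artifact keyword names are read off the method body above and should be treated as assumptions, as should the bucket/project values.

    # Illustrative sketch only -- not shipped with this change.
    # Assumes you are logged in to the platform (apolo config login-with-token ...,
    # as in the CI workflow above) and that WANDB_API_KEY is set.
    from pathlib import Path

    from wabucketref import WaBucketRefAPI

    # Constructor kwargs mirror tests/integration/test_api.py in this diff.
    api = WaBucketRefAPI(bucket="my-bucket", project_name="my-wandb-project")

    # Starts a W&B run; when NEURO_JOB_* env vars are present the run is tagged
    # with the platform job id, name and owner (see _try_get_apolo_tags).
    api.wandb_start_run()

    # Keyword names below are inferred from the upload_artifact body (assumptions):
    # the folder is synced to the bucket and logged to W&B as a reference artifact.
    alias = api.upload_artifact(
        src_folder=Path("./outputs"),
        art_name="model",
        art_type="checkpoint",
        art_metadata={},
    )
    print(f"artifact alias: {alias}")

    # Closes the embedded event-loop runner and the platform client.
    api.close()

The same flow is what the wabucket CLI drives under the hood; the upload command additionally prints ::set-output lines (artifact_type, artifact_name, artifact_alias) that apolo-flow picks up as job outputs.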