diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index f55ffa0..8e062e7 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -2,18 +2,24 @@ name: Lint
 on:
   push:
     branches:
-      - master
+      - main
   pull_request:
 jobs:
   lint:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [ "3.10" ]
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         name: Set up Python
         with:
-          python-version: "3.10"
-      - name: Install flake8
-        run: python -m pip install flake8
-      - name: Run linter
-        run: flake8 ./chord_drs ./tests --exclude=migrations
+          python-version: ${{ matrix.python-version }}
+      - name: Install poetry
+        run: pip install poetry
+      - name: Install dependencies
+        run: poetry install
+      - name: Lint
+        run: |
+          poetry run black --check ./chord_drs ./tests
diff --git a/chord_drs/app.py b/chord_drs/app.py
index 1e2c112..b47bd37 100644
--- a/chord_drs/app.py
+++ b/chord_drs/app.py
@@ -43,21 +43,24 @@
         drs_compat=True,
         logger=application.logger,
         authz=authz_middleware,
-    ))
+    ),
+)
 application.register_error_handler(
     BadRequest,
     flask_errors.flask_error_wrap(
         flask_errors.flask_bad_request_error,
         drs_compat=True,
         authz=authz_middleware,
-    ))
+    ),
+)
 application.register_error_handler(
     Forbidden,
     flask_errors.flask_error_wrap(
         flask_errors.flask_forbidden_error,
         drs_compat=True,
         authz=authz_middleware,
-    ))
+    ),
+)
 application.register_error_handler(
     NotFound,
     lambda e: flask_errors.flask_error_wrap(
@@ -65,14 +68,16 @@
         str(e),
         drs_compat=True,
         authz=authz_middleware,
-    )(e))
+    )(e),
+)
 application.register_error_handler(
     RequestedRangeNotSatisfiable,
     flask_errors.flask_error_wrap(
         flask_errors.flask_range_not_satisfiable_error,
         drs_compat=True,
         authz=authz_middleware,
-    ))
+    ),
+)
 
 # Attach the database to the application and run migrations if needed
 db.init_app(application)
diff --git a/chord_drs/backends/base.py b/chord_drs/backends/base.py
index b810f63..a05f4a1 100644
--- a/chord_drs/backends/base.py
+++ b/chord_drs/backends/base.py
@@ -14,5 +14,6 @@ class FakeBackend(Backend):
     """
     For the tests
     """
+
     def save(self, current_location: str, filename: str) -> str:
         return current_location
diff --git a/chord_drs/backends/local.py b/chord_drs/backends/local.py
index a714902..2209478 100644
--- a/chord_drs/backends/local.py
+++ b/chord_drs/backends/local.py
@@ -14,6 +14,7 @@ class LocalBackend(Backend):
     by this service. Lives on the current filesystem, in a directory
     specified by the DATA var env, the default being in ~/chord_drs_data
     """
+
     def __init__(self):
         self.base_location = Path(current_app.config["SERVICE_DATA"])
         # We can use mkdir, since resolve has been called in config.py
diff --git a/chord_drs/backends/minio.py b/chord_drs/backends/minio.py
index e35bd7b..1c05788 100644
--- a/chord_drs/backends/minio.py
+++ b/chord_drs/backends/minio.py
@@ -12,10 +12,10 @@ class MinioBackend(Backend):
 
     def __init__(self, resource=None):
         self.minio = resource or boto3.resource(
-            's3',
+            "s3",
             endpoint_url=current_app.config["MINIO_URL"],
             aws_access_key_id=current_app.config["MINIO_USERNAME"],
-            aws_secret_access_key=current_app.config["MINIO_PASSWORD"]
+            aws_secret_access_key=current_app.config["MINIO_PASSWORD"],
         )
 
         self.bucket = self.minio.Bucket(current_app.config["MINIO_BUCKET"])
diff --git a/chord_drs/config.py b/chord_drs/config.py
index a3ecd57..9754205 100644
--- a/chord_drs/config.py
+++ b/chord_drs/config.py
@@ -32,10 +32,8 @@ def _get_from_environ_or_fail(var: str) -> str:
 # when deployed inside chord_singularity, DATABASE will be set
 BASEDIR = os.environ.get("DATABASE", APP_DIR.parent)
 SERVICE_DATA: str = str(
-    Path(os.environ.get("DATA", os.path.join(Path.home(), "chord_drs_data")))
-    .expanduser()
-    .absolute()
-    .resolve())
+    Path(os.environ.get("DATA", os.path.join(Path.home(), "chord_drs_data"))).expanduser().absolute().resolve()
+)
 
 # Authorization variables
 AUTHZ_ENABLED = os.environ.get("AUTHZ_ENABLED", "true").strip().lower() in TRUTH_VALUES
diff --git a/chord_drs/routes.py b/chord_drs/routes.py
index 5bdfa03..405f4e7 100644
--- a/chord_drs/routes.py
+++ b/chord_drs/routes.py
@@ -66,18 +66,21 @@ def _post_headers_getter(r: Request) -> dict[str, str]:
         token = r.form.get("token")
         return {"Authorization": f"Bearer {token}"} if token else {}
 
-    return tuple(r[0] or drs_obj.public for r, drs_obj in (
-        zip(
-            authz_middleware.evaluate(
-                request,
-                [build_resource(drs_obj.project_id, drs_obj.dataset_id, drs_obj.data_type) for drs_obj in drs_objs],
-                [permission],
-                headers_getter=_post_headers_getter if request.method == "POST" else None,
-                mark_authz_done=mark_authz_done,
-            ),  # gets us a matrix of len(drs_objs) rows, 1 column with the permission evaluation result
-            drs_objs,
+    return tuple(
+        r[0] or drs_obj.public
+        for r, drs_obj in (
+            zip(
+                authz_middleware.evaluate(
+                    request,
+                    [build_resource(drs_obj.project_id, drs_obj.dataset_id, drs_obj.data_type) for drs_obj in drs_objs],
+                    [permission],
+                    headers_getter=_post_headers_getter if request.method == "POST" else None,
+                    mark_authz_done=mark_authz_done,
+                ),  # gets us a matrix of len(drs_objs) rows, 1 column with the permission evaluation result
+                drs_objs,
+            )
         )
-    ))  # now a tuple of length len(drs_objs) of whether we have the permission for each object
+    )  # now a tuple of length len(drs_objs) of whether we have the permission for each object
 
 
 def fetch_and_check_object_permissions(object_id: str, permission: Permission) -> tuple[DrsBlob | DrsBundle, bool]:
@@ -129,19 +132,23 @@ def build_contents(bundle: DrsBundle, expand: bool) -> list[DRSContentsDict]:
 
     bundles = DrsBundle.query.filter_by(parent_bundle=bundle).all()
     for b in bundles:
-        content.append({
-            **({"contents": build_contents(b, expand)} if expand else {}),
-            "drs_uri": create_drs_uri(b.id),
-            "id": b.id,
-            "name": b.name,  # TODO: Can overwrite... see spec
-        })
+        content.append(
+            {
+                **({"contents": build_contents(b, expand)} if expand else {}),
+                "drs_uri": create_drs_uri(b.id),
+                "id": b.id,
+                "name": b.name,  # TODO: Can overwrite... see spec
+            }
+        )
 
     for c in bundle.objects:
-        content.append({
-            "drs_uri": create_drs_uri(c.id),
-            "id": c.id,
-            "name": c.name,  # TODO: Can overwrite... see spec
-        })
+        content.append(
+            {
+                "drs_uri": create_drs_uri(c.id),
+                "id": c.id,
+                "name": c.name,  # TODO: Can overwrite... see spec
+            }
+        )
 
     return content
 
@@ -161,7 +168,7 @@ def build_bundle_json(drs_bundle: DrsBundle, expand: bool = False) -> DRSObjectD
         # Description should be excluded if null in the database
         **({"description": drs_bundle.description} if drs_bundle.description is not None else {}),
         "id": drs_bundle.id,
-        "self_uri": create_drs_uri(drs_bundle.id)
+        "self_uri": create_drs_uri(drs_bundle.id),
     }
 
 
@@ -170,7 +177,7 @@ def build_blob_json(drs_blob: DrsBlob, inside_container: bool = False) -> DRSObj
 
     blob_url: str = urllib.parse.urljoin(
         current_app.config["SERVICE_BASE_URL"] + "/",
-        url_for("drs_service.object_download", object_id=drs_blob.id).lstrip("/")
+        url_for("drs_service.object_download", object_id=drs_blob.id).lstrip("/"),
     )
 
     https_access_method: DRSAccessMethodDict = {
@@ -187,19 +194,23 @@ def build_blob_json(drs_blob: DrsBlob, inside_container: bool = False) -> DRSObj
     access_methods: list[DRSAccessMethodDict] = [https_access_method]
 
     if inside_container and data_source == DATA_SOURCE_LOCAL:
-        access_methods.append({
-            "access_url": {
-                "url": f"file://{drs_blob.location}",
-            },
-            "type": "file",
-        })
+        access_methods.append(
+            {
+                "access_url": {
+                    "url": f"file://{drs_blob.location}",
+                },
+                "type": "file",
+            }
+        )
     elif data_source == DATA_SOURCE_MINIO:
-        access_methods.append({
-            "access_url": {
-                "url": drs_blob.location,
-            },
-            "type": "s3",
-        })
+        access_methods.append(
+            {
+                "access_url": {
+                    "url": drs_blob.location,
+                },
+                "type": "s3",
+            }
+        )
 
     return {
         "access_methods": access_methods,
@@ -215,7 +226,7 @@ def build_blob_json(drs_blob: DrsBlob, inside_container: bool = False) -> DRSObj
         # Description should be excluded if null in the database
         **({"description": drs_blob.description} if drs_blob.description is not None else {}),
         "id": drs_blob.id,
-        "self_uri": create_drs_uri(drs_blob.id)
+        "self_uri": create_drs_uri(drs_blob.id),
     }
 
 
@@ -224,23 +235,25 @@
 @authz_middleware.deco_public_endpoint
 def service_info():
     # Spec: https://github.com/ga4gh-discovery/ga4gh-service-info
-    return jsonify(async_to_sync(build_service_info)(
-        {
-            "id": current_app.config["SERVICE_ID"],
-            "name": SERVICE_NAME,
-            "type": SERVICE_TYPE,
-            "description": "Data repository service (based on GA4GH's specs) for a Bento platform node.",
-            "organization": SERVICE_ORGANIZATION_C3G,
-            "contactUrl": "mailto:info@c3g.ca",
-            "version": __version__,
-            "bento": {
-                "serviceKind": BENTO_SERVICE_KIND,
+    return jsonify(
+        async_to_sync(build_service_info)(
+            {
+                "id": current_app.config["SERVICE_ID"],
+                "name": SERVICE_NAME,
+                "type": SERVICE_TYPE,
+                "description": "Data repository service (based on GA4GH's specs) for a Bento platform node.",
+                "organization": SERVICE_ORGANIZATION_C3G,
+                "contactUrl": "mailto:info@c3g.ca",
+                "version": __version__,
+                "bento": {
+                    "serviceKind": BENTO_SERVICE_KIND,
+                },
             },
-        },
-        debug=current_app.config["BENTO_DEBUG"],
-        local=current_app.config["BENTO_CONTAINER_LOCAL"],
-        logger=current_app.logger,
-    ))
+            debug=current_app.config["BENTO_DEBUG"],
+            local=current_app.config["BENTO_CONTAINER_LOCAL"],
+            logger=current_app.logger,
+        )
+    )
 
 
 def get_drs_object(object_id: str) -> tuple[DrsBlob | DrsBundle | None, bool]:
@@ -297,12 +310,14 @@ def object_search():
     elif fuzzy_name:
         objects = DrsBlob.query.filter(DrsBlob.name.contains(fuzzy_name)).all()
     elif search_q:
-        objects = DrsBlob.query.filter(or_(
-            DrsBlob.id.contains(search_q),
-            DrsBlob.name.contains(search_q),
-            DrsBlob.checksum.contains(search_q),
-            DrsBlob.description.contains(search_q),
-        ))
+        objects = DrsBlob.query.filter(
+            or_(
+                DrsBlob.id.contains(search_q),
+                DrsBlob.name.contains(search_q),
+                DrsBlob.checksum.contains(search_q),
+                DrsBlob.description.contains(search_q),
+            )
+        )
     else:
         authz_middleware.mark_authz_done(request)
         raise BadRequest("Missing GET search terms (name | fuzzy_name | q)")
@@ -336,7 +351,8 @@ def object_download(object_id: str):
         if range_header is None:
             # Early return, no range header so send the whole thing
             res = make_response(
-                send_file(drs_object.location, mimetype=MIME_OCTET_STREAM, download_name=drs_object.name))
+                send_file(drs_object.location, mimetype=MIME_OCTET_STREAM, download_name=drs_object.name)
+            )
             res.headers["Accept-Ranges"] = "bytes"
             return res
 
@@ -360,13 +376,13 @@
 
         if end > drs_end_byte:
             raise range_not_satisfiable_log_mark(
-                f"End cannot be past last byte ({end} > {drs_end_byte})",
-                drs_object.size)
+                f"End cannot be past last byte ({end} > {drs_end_byte})", drs_object.size
+            )
 
         if end < start:
             raise range_not_satisfiable_log_mark(
-                f"Invalid range header: end cannot be less than start (start={start}, end={end})",
-                drs_object.size)
+                f"Invalid range header: end cannot be less than start (start={start}, end={end})", drs_object.size
+            )
 
         def generate_bytes():
             with open(drs_object.location, "rb") as fh2:
@@ -390,20 +406,18 @@ def generate_bytes():
 
         # Stream the bytes of the file or file segment from the generator function
         r = current_app.response_class(generate_bytes(), status=206, mimetype=MIME_OCTET_STREAM)
-        r.headers["Content-Length"] = (end + 1 - start)  # byte range is inclusive, so need to add one
+        r.headers["Content-Length"] = end + 1 - start  # byte range is inclusive, so need to add one
         r.headers["Content-Range"] = f"bytes {start}-{end}/{drs_object.size}"
-        r.headers["Content-Disposition"] = \
+        r.headers["Content-Disposition"] = (
             f"attachment; filename*=UTF-8'{urllib.parse.quote(drs_object.name, encoding='utf-8')}'"
+        )
         return r
 
     # TODO: Support range headers for MinIO objects - only the local backend supports it for now
     # TODO: kinda greasy, not really sure we want to support such a feature later on
     response = make_response(
         send_file(
-            minio_obj["Body"],
-            mimetype="application/octet-stream",
-            as_attachment=True,
-            download_name=drs_object.name
+            minio_obj["Body"], mimetype="application/octet-stream", as_attachment=True, download_name=drs_object.name
         )
     )
 
@@ -428,12 +442,16 @@ def object_ingest():
     file = request.files.get("file")
 
     # This authz call determines everything, so we can mark authz as done when the call completes:
-    has_permission: bool = authz_middleware.evaluate_one(
-        request,
-        build_resource(project_id, dataset_id, data_type),
-        P_INGEST_DATA,
-        mark_authz_done=True,
-    ) if authz_enabled() else True
+    has_permission: bool = (
+        authz_middleware.evaluate_one(
+            request,
+            build_resource(project_id, dataset_id, data_type),
+            P_INGEST_DATA,
+            mark_authz_done=True,
+        )
+        if authz_enabled()
+        else True
+    )
 
     if not has_permission:
         raise Forbidden("Forbidden")
@@ -472,19 +490,23 @@
             candidate_drs_object: DrsBlob | None = DrsBlob.query.filter_by(checksum=checksum).first()
 
             if candidate_drs_object is not None:
-                if all((
-                    candidate_drs_object.project_id == project_id,
-                    candidate_drs_object.dataset_id == dataset_id,
-                    candidate_drs_object.data_type == data_type,
-                    candidate_drs_object.public == public,
-                )):
+                if all(
+                    (
+                        candidate_drs_object.project_id == project_id,
+                        candidate_drs_object.dataset_id == dataset_id,
+                        candidate_drs_object.data_type == data_type,
+                        candidate_drs_object.public == public,
+                    )
+                ):
                     logger.info(
-                        f"Found duplicate DRS object via checksum (will fully deduplicate): {candidate_drs_object}")
+                        f"Found duplicate DRS object via checksum (will fully deduplicate): {candidate_drs_object}"
+                    )
                     drs_object = candidate_drs_object
                 else:
                     logger.info(
                         f"Found duplicate DRS object via checksum (will deduplicate JUST bytes): "
-                        f"{candidate_drs_object}")
+                        f"{candidate_drs_object}"
+                    )
                     object_to_copy = candidate_drs_object
 
         if not drs_object:
diff --git a/poetry.lock b/poetry.lock
index b8d65d4..165c917 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -227,6 +227,52 @@ django = ["django (>=4.2.7,<5.1)", "djangorestframework (>=3.14.0,<3.16)"]
 fastapi = ["fastapi (>=0.104,<0.111)"]
 flask = ["flask (>=2.2.5,<4)"]
 
+[[package]]
+name = "black"
+version = "24.4.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"},
+    {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"},
+    {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"},
+    {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"},
+    {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"},
+    {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"},
+    {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"},
+    {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"},
+    {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"},
+    {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"},
+    {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"},
+    {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"},
+    {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"},
+    {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"},
+    {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"},
+    {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"},
+    {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"},
+    {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"},
+    {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"},
+    {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"},
+    {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"},
+    {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
 [[package]]
 name = "boto3"
 version = "1.34.83"
@@ -1298,6 +1344,17 @@ files = [
     {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
 ]
 
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
 [[package]]
 name = "platformdirs"
 version = "4.2.0"
@@ -2314,4 +2371,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "de0372d6f746df8fd0a068626a842ef1b1ff55a5048d5fae65b9eb218b8fc53b"
+content-hash = "a1b1fbd8082d39ea59b37a7a31d4df53e164d56b218542d00ca70c2a26e575b3"
diff --git a/pyproject.toml b/pyproject.toml
index 2bcac80..93c2f7f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,7 +39,12 @@ sqlalchemy-stubs = "^0.4"
 mypy = "^1.7.1"
 types-flask-cors = "^4.0.0.2"
 types-flask-migrate = "^4.0.0.7"
+black = "^24.4.0"
 
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
+
+[tool.black]
+line-length = 120
+exclude = "migrations"
diff --git a/tests/conftest.py b/tests/conftest.py
index 72b3e54..c53f60c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -24,21 +24,25 @@ def non_existant_dummy_file_path() -> str:
     # Function rather than constant so we can set environ first
     from chord_drs.config import APP_DIR
+
     return str(APP_DIR.parent / "potato")
 
 
 def dummy_file_path() -> str:
     # Function rather than constant so we can set environ first
     from chord_drs.config import APP_DIR
+
     return str(APP_DIR.parent / "tests" / "dummy_file.txt")
 
 
 def dummy_directory_path() -> str:
     # Function rather than constant so we can set environ first
     from chord_drs.config import APP_DIR
+
     return str(APP_DIR / "migrations")
 
 
 def empty_file_path():
     # Function rather than constant so we can set environ first
     from chord_drs.config import APP_DIR
+
     return str(APP_DIR.parent / "tests" / "empty_file.txt")
@@ -88,10 +92,7 @@ def client_local():
         db.drop_all()
 
 
-@pytest.fixture(params=[
-    lazy_fixture("client_minio"),
-    lazy_fixture("client_local")
-])
+@pytest.fixture(params=[lazy_fixture("client_minio"), lazy_fixture("client_local")])
 def client(request):
     return request.param
 
diff --git a/tests/test_commands.py b/tests/test_commands.py
index b193930..e2a6baa 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -23,7 +23,7 @@ def test_ingest(client_local):
     runner = CliRunner()
     result = runner.invoke(ingest, [dummy_file])
 
-    filename = dummy_file.split('/')[-1]
+    filename = dummy_file.split("/")[-1]
     obj = DrsBlob.query.filter_by(name=filename).first()
 
     assert result.exit_code == 0
@@ -32,7 +32,7 @@ def test_ingest(client_local):
 
     result = runner.invoke(ingest, [dummy_dir])
 
-    filename = dummy_dir.split('/')[-1]
+    filename = dummy_dir.split("/")[-1]
     bundle = DrsBundle.query.filter_by(name=filename).first()
 
     assert result.exit_code == 0
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 84d760a..0005595 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -272,10 +272,13 @@ def test_search_bad_query(client, drs_bundle):
 
 
 @responses.activate
-@pytest.mark.parametrize("url", (
-    "/search?name=asd",
-    "/search?fuzzy_name=asd",
-))
+@pytest.mark.parametrize(
+    "url",
+    (
+        "/search?name=asd",
+        "/search?fuzzy_name=asd",
+    ),
+)
 def test_search_object_empty(client, drs_bundle, url):
     authz_everything_true(count=len(drs_bundle.objects))
 
@@ -287,14 +290,17 @@ def test_search_object_empty(client, drs_bundle, url):
 
 
 @responses.activate
-@pytest.mark.parametrize("url", (
-    "/search?name=alembic.ini",
-    "/search?fuzzy_name=mbic",
-    "/search?name=alembic.ini&internal_path=1",
-    "/search?q=alembic.ini",
-    "/search?q=mbic.i",
-    "/search?q=alembic.ini&internal_path=1",
-))
+@pytest.mark.parametrize(
+    "url",
+    (
+        "/search?name=alembic.ini",
+        "/search?fuzzy_name=mbic",
+        "/search?name=alembic.ini&internal_path=1",
+        "/search?q=alembic.ini",
+        "/search?q=mbic.i",
+        "/search?q=alembic.ini&internal_path=1",
+    ),
+)
 def test_search_object(client, drs_bundle, url):
     authz_everything_true(count=len(drs_bundle.objects))  # TODO: + 1 once we can search bundles
 
diff --git a/tox.ini b/tox.ini
index b137145..4548612 100644
--- a/tox.ini
+++ b/tox.ini
@@ -9,6 +9,6 @@
 allowlist_externals = poetry
 commands =
     poetry install
+    poetry run black ./chord_drs ./tests
    poetry run pytest -svv --cov=chord_drs --cov-branch {posargs}
-    poetry run flake8 ./chord_drs ./tests --exclude=migrations
    poetry run coverage html