diff --git a/Makefile b/Makefile index e9b97bf9aa..5c6c1f2bd4 100644 --- a/Makefile +++ b/Makefile @@ -152,3 +152,7 @@ rebuildreqs: .env .docker-build ## | Rebuild requirements.txt file after requir .PHONY: updatereqs updatereqs: .env .docker-build ## | Update deps in requirements.txt file. ${DC} run --rm --no-deps app shell pip-compile --generate-hashes --strip-extras --upgrade + +.PHONY: servicestatus +servicestatus: .env .docker-build ## | Check service status + ${DC} run --rm --no-deps app shell service-status diff --git a/bin/gcs_cli.py b/bin/gcs_cli.py deleted file mode 100755 index a529ce8070..0000000000 --- a/bin/gcs_cli.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -# Manipulate emulated GCS storage. - -# Usage: ./bin/gcs_cli.py CMD - -import os -from pathlib import Path, PurePosixPath - -import click - -from google.auth.credentials import AnonymousCredentials -from google.cloud import storage -from google.cloud.exceptions import NotFound - - -def get_endpoint_url(): - return os.environ["STORAGE_EMULATOR_HOST"] - - -def get_client(): - project_id = os.environ["STORAGE_PROJECT_ID"] - return storage.Client(credentials=AnonymousCredentials(), project=project_id) - - -@click.group() -def gcs_group(): - """Local dev environment GCS manipulation script""" - - -@gcs_group.command("create") -@click.argument("bucket_name") -def create_bucket(bucket_name): - """Creates a bucket - - Specify BUCKET_NAME. 
- - """ - # README at https://github.com/fsouza/fake-gcs-server - endpoint_url = get_endpoint_url() - - client = get_client() - - try: - client.get_bucket(bucket_name) - click.echo(f"GCS bucket {bucket_name!r} exists in {endpoint_url!r}.") - except NotFound: - client.create_bucket(bucket_name) - click.echo(f"GCS bucket {bucket_name!r} in {endpoint_url!r} created.") - - -@gcs_group.command("delete") -@click.argument("bucket_name") -def delete_bucket(bucket_name): - """Deletes a bucket - - Specify BUCKET_NAME. - - """ - # README at https://github.com/fsouza/fake-gcs-server/ - endpoint_url = get_endpoint_url() - - client = get_client() - - bucket = None - - try: - bucket = client.get_bucket(bucket_name) - except NotFound: - click.echo(f"GCS bucket {bucket_name!r} at {endpoint_url!r} does not exist.") - return - - # delete blobs before deleting bucket, because bucket.delete(force=True) doesn't - # work if there are more than 256 blobs in the bucket. - for blob in bucket.list_blobs(): - blob.delete() - - bucket.delete() - click.echo(f"GCS bucket {bucket_name!r} at {endpoint_url!r} deleted.") - - -@gcs_group.command("list_buckets") -@click.option("--details/--no-details", default=True, type=bool, help="With details") -def list_buckets(details): - """List GCS buckets""" - - client = get_client() - - buckets = client.list_buckets() - for bucket in buckets: - if details: - # https://cloud.google.com/storage/docs/json_api/v1/buckets#resource-representations - click.echo(f"{bucket.name}\t{bucket.time_created}") - else: - click.echo(f"{bucket.name}") - - -@gcs_group.command("list_objects") -@click.option("--details/--no-details", default=True, type=bool, help="With details") -@click.argument("bucket_name") -def list_objects(bucket_name, details): - """List contents of a bucket""" - - client = get_client() - - try: - client.get_bucket(bucket_name) - except NotFound: - click.echo(f"GCS bucket {bucket_name!r} does not exist.") - return - - blobs = 
list(client.list_blobs(bucket_name)) - if blobs: - for blob in blobs: - # https://cloud.google.com/storage/docs/json_api/v1/objects#resource-representations - if details: - click.echo(f"{blob.name}\t{blob.size}\t{blob.updated}") - else: - click.echo(f"{blob.name}") - else: - click.echo("No objects in bucket.") - - -@gcs_group.command("upload") -@click.argument("source") -@click.argument("destination") -def upload(source, destination): - """Upload files to a bucket - - SOURCE is a path to a file or directory of files. will recurse on directory trees - - DESTINATION is a path to a file or directory in the bucket. If SOURCE is a - directory or DESTINATION ends with "/", then DESTINATION is treated as a directory. - """ - - client = get_client() - - # remove protocol from destination if present - destination = destination.split("://", 1)[-1] - bucket_name, _, prefix = destination.partition("/") - prefix_path = PurePosixPath(prefix) - - try: - bucket = client.get_bucket(bucket_name) - except NotFound as e: - raise click.ClickException(f"GCS bucket {bucket_name!r} does not exist.") from e - - source_path = Path(source) - if not source_path.exists(): - raise click.ClickException(f"local path {source!r} does not exist.") - source_is_dir = source_path.is_dir() - if source_is_dir: - sources = [p for p in source_path.rglob("*") if not p.is_dir()] - else: - sources = [source_path] - if not sources: - raise click.ClickException(f"No files in directory {source!r}.") - for path in sources: - if source_is_dir: - # source is a directory so treat destination as a directory - key = str(prefix_path / path.relative_to(source_path)) - elif prefix == "" or prefix.endswith("/"): - # source is a file but destination is a directory, preserve file name - key = str(prefix_path / path.name) - else: - key = prefix - blob = bucket.blob(key) - blob.upload_from_filename(path) - click.echo(f"Uploaded gs://{bucket_name}/{key}") - - -def main(argv=None): - argv = argv or [] - gcs_group(argv) - - -if 
__name__ == "__main__": - gcs_group() diff --git a/bin/license-check.py b/bin/license-check.py deleted file mode 100755 index 05b755288d..0000000000 --- a/bin/license-check.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -This script checks files for license headers. - -This requires Python 3.8+ to run. - -See https://github.com/willkg/socorro-release/#readme for details. - -repo: https://github.com/willkg/socorro-release/ -sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870 - -""" - -import argparse -import pathlib -import subprocess -import sys - - -DESCRIPTION = ( - "Checks files in specified directory for license headers. " - + "If you don't specify a target, it'll check all files in \"git ls-files\"." -) - -# From https://www.mozilla.org/en-US/MPL/2.0/ -MPLV2 = [ - "This Source Code Form is subject to the terms of the Mozilla Public", - "License, v. 2.0. If a copy of the MPL was not distributed with this", - "file, You can obtain one at https://mozilla.org/MPL/2.0/.", -] - - -LANGUAGE_DATA = {".py": {"comment": ("#",)}} - - -def is_code_file(path: pathlib.Path): - """Determines whether the file is a code file we need to check. - - :param path: the Path for the file - - :returns: True if it's a code file to check, False otherwise. - - """ - if not path.is_file(): - return False - ending: pathlib.Path = path.suffix - return ending in LANGUAGE_DATA - - -def has_license_header(path: pathlib.Path): - """Determines if file at path has an MPLv2 license header. - - :param path: the Path for the file - - :returns: True if it does, False if it doesn't. 
- - """ - ending: pathlib.Path = path.suffix - comment_indicators = LANGUAGE_DATA[ending]["comment"] - - header = [] - with open(path, "r") as fp: - firstline = True - for line in fp.readlines(): - if firstline and line.startswith("#!"): - firstline = False - continue - - line = line.strip() - # NOTE(willkg): this doesn't handle multiline comments like in C++ - for indicator in comment_indicators: - line = line.strip(indicator) - line = line.strip() - - # Skip blank lines - if not line: - continue - - header.append(line) - if len(header) == len(MPLV2): - if header[: len(MPLV2)] == MPLV2: - return True - else: - break - - return False - - -def main(args): - parser = argparse.ArgumentParser(description=DESCRIPTION) - parser.add_argument( - "-l", "--file-only", action="store_true", help="print files only" - ) - parser.add_argument("--verbose", action="store_true", help="verbose output") - parser.add_argument("target", help="file or directory tree to check", nargs="?") - - parsed = parser.parse_args(args) - - if parsed.target: - target = pathlib.Path(parsed.target) - if not target.exists(): - if not parsed.file_only: - print(f"Not a valid file or directory: {target}") - return 1 - - if target.is_file(): - targets = [target] - - elif target.is_dir(): - targets = list(target.rglob("*")) - - else: - ret = subprocess.check_output(["git", "ls-files"]) - targets = [ - pathlib.Path(target.strip()) for target in ret.decode("utf-8").splitlines() - ] - - missing_headers = 0 - - # Iterate through all the files in this target directory - for path in targets: - if parsed.verbose: - print(f"Checking {path}") - if is_code_file(path) and not has_license_header(path): - missing_headers += 1 - if parsed.file_only: - print(str(path)) - else: - print(f"File {path} does not have license header.") - - if missing_headers > 0: - if not parsed.file_only: - print(f"Files with missing headers: {missing_headers}") - print("") - print("Add this:") - print("") - print("\n".join(MPLV2)) - return 1 - 
- if not parsed.file_only: - print("No files missing headers.") - - return 0 - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/bin/lint.sh b/bin/lint.sh index f292215b62..159a5346f1 100755 --- a/bin/lint.sh +++ b/bin/lint.sh @@ -28,13 +28,13 @@ else echo ">>> license check (${PYTHON_VERSION})" if [[ -d ".git" ]]; then - # If the .git directory exists, we can let license-check.py do + # If the .git directory exists, we can let license-check do # git ls-files. - python bin/license-check.py + license-check else # The .git directory doesn't exist, so run it on all the Python # files in the tree. - python bin/license-check.py . + license-check . fi echo ">>> eslint (js)" diff --git a/bin/pubsub_cli.py b/bin/pubsub_cli.py deleted file mode 100755 index 02de6ae156..0000000000 --- a/bin/pubsub_cli.py +++ /dev/null @@ -1,218 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -# Pub/Sub manipulation script. -# -# Note: Run this in the base container which has access to Pub/Sub. 
-# -# Usage: ./bin/pubsub_cli.py [SUBCOMMAND] - -import sys - -import click -from google.cloud import pubsub_v1 -from google.api_core.exceptions import AlreadyExists, NotFound - -from socorro import settings - - -@click.group() -def pubsub_group(): - """Local dev environment Pub/Sub emulator manipulation script.""" - - -@pubsub_group.command("list_topics") -@click.argument("project_id") -@click.pass_context -def list_topics(ctx, project_id): - """List topics for this project.""" - click.echo(f"Listing topics in project {project_id}.") - publisher = pubsub_v1.PublisherClient() - - for topic in publisher.list_topics(project=f"projects/{project_id}"): - click.echo(topic.name) - - -@pubsub_group.command("list_subscriptions") -@click.argument("project_id") -@click.argument("topic_name") -@click.pass_context -def list_subscriptions(ctx, project_id, topic_name): - """List subscriptions for a given topic.""" - click.echo(f"Listing subscriptions in topic {topic_name!r}:") - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - for subscription in publisher.list_topic_subscriptions(topic=topic_path): - click.echo(subscription) - - -@pubsub_group.command("create_topic") -@click.argument("project_id") -@click.argument("topic_name") -@click.pass_context -def create_topic(ctx, project_id, topic_name): - """Create topic.""" - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - try: - publisher.create_topic(name=topic_path) - click.echo(f"Topic created: {topic_path}") - except AlreadyExists: - click.echo("Topic already created.") - - -@pubsub_group.command("create_subscription") -@click.argument("project_id") -@click.argument("topic_name") -@click.argument("subscription_name") -@click.pass_context -def create_subscription(ctx, project_id, topic_name, subscription_name): - """Create subscription.""" - publisher = pubsub_v1.PublisherClient() - topic_path = 
publisher.topic_path(project_id, topic_name) - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) - try: - subscriber.create_subscription( - name=subscription_path, - topic=topic_path, - ack_deadline_seconds=600, - ) - click.echo(f"Subscription created: {subscription_path}") - except AlreadyExists: - click.echo("Subscription already created.") - - -@pubsub_group.command("delete_topic") -@click.argument("project_id") -@click.argument("topic_name") -@click.pass_context -def delete_topic(ctx, project_id, topic_name): - """Delete a topic and all subscriptions.""" - publisher = pubsub_v1.PublisherClient() - subscriber = pubsub_v1.SubscriberClient() - topic_path = publisher.topic_path(project_id, topic_name) - - # Delete all subscriptions - for subscription in publisher.list_topic_subscriptions(topic=topic_path): - click.echo(f"Deleting {subscription} ...") - subscriber.delete_subscription(subscription=subscription) - - # Delete topic - try: - publisher.delete_topic(topic=topic_path) - click.echo(f"Topic deleted: {topic_name}") - except NotFound: - click.echo(f"Topic {topic_name} does not exist.") - - -@pubsub_group.command("publish") -@click.argument("project_id") -@click.argument("topic_name") -@click.argument("crashids", nargs=-1) -@click.pass_context -def publish(ctx, project_id, topic_name, crashids): - """Publish crash_id to a given topic.""" - click.echo(f"Publishing crash ids to topic: {topic_name!r}:") - # configure publisher to group all crashids into a single batch - publisher = pubsub_v1.PublisherClient( - batch_settings=pubsub_v1.types.BatchSettings(max_messages=len(crashids)) - ) - topic_path = publisher.topic_path(project_id, topic_name) - - # Pull crash ids from stdin if there are any - if not crashids and not sys.stdin.isatty(): - crashids = list(click.get_text_stream("stdin").readlines()) - - if not crashids: - raise click.BadParameter( - "No crashids provided.", ctx=ctx, 
param="crashids", param_hint="crashids" - ) - - # publish all crashes before checking futures to allow for batching - futures = [ - publisher.publish(topic_path, crashid.encode("utf-8"), timeout=5) - for crashid in crashids - ] - for future in futures: - click.echo(future.result()) - - -@pubsub_group.command("pull") -@click.argument("project_id") -@click.argument("subscription_name") -@click.option("--ack/--no-ack", is_flag=True, default=False) -@click.option("--max-messages", default=1, type=int) -@click.pass_context -def pull(ctx, project_id, subscription_name, ack, max_messages): - """Pull crash id from a given subscription.""" - click.echo(f"Pulling crash id from subscription {subscription_name!r}:") - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) - - response = subscriber.pull( - subscription=subscription_path, - max_messages=max_messages, - return_immediately=True, - ) - if not response.received_messages: - return - - ack_ids = [] - for msg in response.received_messages: - click.echo(f"crash id: {msg.message.data}") - ack_ids.append(msg.ack_id) - - if ack: - # Acknowledges the received messages so they will not be sent again. 
- subscriber.acknowledge(subscription=subscription_path, ack_ids=ack_ids) - - -@pubsub_group.command("create-all") -@click.pass_context -def create_all(ctx): - """Create Pub/Sub queues related to processing.""" - options = settings.QUEUE_PUBSUB["options"] - project_id = options["project_id"] - queues = { - options["standard_topic_name"]: options["standard_subscription_name"], - options["priority_topic_name"]: options["priority_subscription_name"], - options["reprocessing_topic_name"]: options["reprocessing_subscription_name"], - } - for topic_name, subscription_name in queues.items(): - ctx.invoke(create_topic, project_id=project_id, topic_name=topic_name) - ctx.invoke( - create_subscription, - project_id=project_id, - topic_name=topic_name, - subscription_name=subscription_name, - ) - - -@pubsub_group.command("delete-all") -@click.pass_context -def delete_all(ctx): - """Delete Pub/Sub queues related to processing.""" - options = settings.QUEUE_PUBSUB["options"] - project_id = options["project_id"] - for topic_name in ( - options["standard_topic_name"], - options["priority_topic_name"], - options["reprocessing_topic_name"], - ): - ctx.invoke(delete_topic, project_id=project_id, topic_name=topic_name) - - -def main(argv=None): - argv = argv or [] - pubsub_group(argv) - - -if __name__ == "__main__": - pubsub_group() diff --git a/bin/release.py b/bin/release.py deleted file mode 100755 index 6912035dda..0000000000 --- a/bin/release.py +++ /dev/null @@ -1,483 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -This script handles releases for this project. - -This has two subcommands: ``make-bug`` and ``make-tag``. See the help text for -both. - -This requires Python 3.8+ to run. 
- -Note: If you want to use ``pyproject.toml`` and you're using Python <3.11, this -also requires the tomli library. - -See https://github.com/willkg/socorro-release/#readme for details. - -repo: https://github.com/willkg/socorro-release/ -sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870 - -""" - -import argparse -import configparser -import datetime -import json -import os -import re -import shlex -import subprocess -import sys -from urllib.parse import urlencode -from urllib.request import urlopen - - -DESCRIPTION = """ -release.py makes it easier to create deploy bugs and push tags to trigger -deploys. - -For help, see: https://github.com/willkg/socorro-release/ -""" - -GITHUB_API = "https://api.github.com/" -BZ_CREATE_URL = "https://bugzilla.mozilla.org/enter_bug.cgi" -BZ_BUG_JSON_URL = "https://bugzilla.mozilla.org/rest/bug/" - -DEFAULT_CONFIG = { - # Bugzilla product and component to write new bugs in - "bugzilla_product": "", - "bugzilla_component": "", - # GitHub user and project name - "github_user": "", - "github_project": "", - # The name of the main branch - "main_branch": "", - # The tag structure using datetime formatting markers - "tag_name_template": "%Y.%m.%d", -} - -LINE = "=" * 80 - -# Recognize "bug-NNNNNNN", "bug NNNNNNN", and multi-bug variants -BUG_RE = re.compile(r"\bbug(?:s?:?\s*|-)([\d\s,\+&#and]+)\b", re.IGNORECASE) - -# Recognize "bug-NNNNNNN" -BUG_HYPHEN_PREFIX_RE = re.compile(r"bug-([\d]+)", re.IGNORECASE) - - -def get_config(): - """Generates configuration. - - This tries to pull configuration from: - - 1. the ``[tool.release]`` table from a ``pyproject.toml`` file, OR - 2. the ``[tool:release]`` section of a ``setup.cfg`` file - - If neither exist, then it uses defaults. 
- - :returns: configuration dict - - """ - my_config = dict(DEFAULT_CONFIG) - - if os.path.exists("pyproject.toml"): - if sys.version_info >= (3, 11): - import tomllib - else: - try: - import tomli as tomllib - except ImportError: - print( - "For Python <3.11, you need to install tomli to work with pyproject.toml " - + "files." - ) - tomllib = None - - if tomllib is not None: - with open("pyproject.toml", "rb") as fp: - data = tomllib.load(fp) - - config_data = data.get("tool", {}).get("release", {}) - if config_data: - for key, default_val in my_config.items(): - my_config[key] = config_data.get(key, default_val) - return my_config - - if os.path.exists("setup.cfg"): - config = configparser.ConfigParser() - config.read("setup.cfg") - - if "tool:release" in config: - config = config["tool:release"] - for key, default_val in my_config.items(): - my_config[key] = config.get(key, default_val) - - return my_config - - return my_config - - -def find_bugs(line): - """Returns all the bug numbers from the line. - - >>> get_bug_numbers("some line") - [] - >>> get_bug_numbers("bug-1111111: some line") - ["1111111"] - >>> get_bug_numbers("bug 1111111, 2222222: some line") - ["1111111", "2222222"] - - """ - matches = BUG_RE.findall(line) - if not matches: - return [] - bugs = [] - for match in matches: - for part in re.findall(r"\d+", match): - if part: - bugs.append(part) - return bugs - - -def fetch(url, is_json=True): - """Fetch data from a url - - This raises URLError on HTTP request errors. It also raises JSONDecode - errors if it's not valid JSON. 
- - """ - fp = urlopen(url) - data = fp.read() - if is_json: - return json.loads(data) - return data - - -def fetch_history_from_github(owner, repo, from_rev, main_branch): - url = f"{GITHUB_API}repos/{owner}/{repo}/compare/{from_rev}...{main_branch}" - return fetch(url) - - -def check_output(cmdline, **kwargs): - args = shlex.split(cmdline) - return subprocess.check_output(args, **kwargs).decode("utf-8").strip() - - -def get_remote_name(github_user): - """Figures out the right remote to use - - People name the git remote differently, so this figures out which one to - use. - - :arg str github_user: the github user for the remote name to use - - :returns: the name of the remote - - :raises Exception: if it can't figure out the remote name for the specified - user - - """ - # Figure out remote to push tag to - remote_output = check_output("git remote -v") - - def check_ssh(github_user, remote_url): - return f":{github_user}/" in remote_url - - def check_https(github_user, remote_url): - return f"/{github_user}/" in remote_url - - for line in remote_output.splitlines(): - line = line.split("\t") - if check_ssh(github_user, line[1]) or check_https(github_user, line[1]): - return line[0] - - raise Exception(f"Can't figure out remote name for {github_user}.") - - -def make_tag( - bug_number, - github_project, - github_user, - remote_name, - tag_name, - commits_since_tag, -): - """Tags a release.""" - if bug_number: - resp = fetch(BZ_BUG_JSON_URL + bug_number, is_json=True) - bug_summary = resp["bugs"][0]["summary"] - - input(f">>> Using bug {bug_number}: {bug_summary}. Correct? 
Ctrl-c to cancel") - - message = ( - f"Tag {tag_name} (bug #{bug_number})\n\n" - + "\n".join(commits_since_tag) - + f"\n\nDeploy bug #{bug_number}" - ) - else: - message = f"Tag {tag_name}\n\n" + "\n".join(commits_since_tag) - - # Print out new tag information - print("") - print(">>> New tag: %s" % tag_name) - print(">>> Tag message:") - print(LINE) - print(message) - print(LINE) - - # Create tag - input(f">>> Ready to tag {tag_name}? Ctrl-c to cancel") - print("") - print(">>> Creating tag...") - subprocess.check_call(["git", "tag", "-s", tag_name, "-m", message]) - - # Push tag - input(f">>> Ready to push to remote {remote_name}? Ctrl-c to cancel") - print("") - print(">>> Pushing...") - subprocess.check_call(["git", "push", "--tags", remote_name, tag_name]) - - if bug_number: - # Show url to tag information on GitHub for bug comment - tag_url = ( - f"https://github.com/{github_user}/{github_project}/releases/tag/{tag_name}" - ) - print("") - print(f">>> Copy and paste this tag url into bug #{bug_number}.") - print(">>> %<-----------------------------------------------") - print(f"{tag_url}") - print(">>> %<-----------------------------------------------") - - -def make_bug( - github_project, - tag_name, - commits_since_tag, - bugs_referenced, - bugzilla_product, - bugzilla_component, -): - """Creates a bug.""" - summary = f"{github_project} deploy: {tag_name}" - print(">>> Creating deploy bug...") - print(">>> Summary") - print(summary) - print() - - description = [ - f"We want to do a deploy for `{github_project}` tagged `{tag_name}`.", - "", - "It consists of the following commits:", - "", - ] - description.extend(commits_since_tag) - if bugs_referenced: - description.append("") - description.append("Bugs referenced:") - description.append("") - for bug in sorted(bugs_referenced): - description.append(f"* bug #{bug}") - description = "\n".join(description) - - print(">>> Description") - print(description) - print() - - if bugzilla_product: - bz_params = { - 
"priority": "P2", - "bug_type": "task", - "comment": description, - "form_name": "enter_bug", - "short_desc": summary, - } - - bz_params["product"] = bugzilla_product - if bugzilla_component: - bz_params["component"] = bugzilla_component - - bugzilla_link = BZ_CREATE_URL + "?" + urlencode(bz_params) - print(">>> Link to create bug (may not work if it's sufficiently long)") - print(bugzilla_link) - - -def run(): - config = get_config() - - parser = argparse.ArgumentParser(description=DESCRIPTION) - - # Add items that can be configured to argparse as configuration options. - # This makes it possible to specify or override configuration with command - # line arguments. - for key, val in config.items(): - key_arg = key.replace("_", "-") - default_val = val.replace("%", "%%") - parser.add_argument( - f"--{key_arg}", - default=val, - help=f"override configuration {key}; defaults to {default_val!r}", - ) - - subparsers = parser.add_subparsers(dest="cmd") - subparsers.required = True - - subparsers.add_parser("make-bug", help="Make a deploy bug") - make_tag_parser = subparsers.add_parser("make-tag", help="Make a tag and push it") - make_tag_parser.add_argument( - "--with-bug", dest="bug", help="Bug for this deploy if any." - ) - make_tag_parser.add_argument( - "--with-tag", - dest="tag", - help="Tag to use; defaults to figuring out the tag using tag_name_template.", - ) - - args = parser.parse_args() - - github_project = args.github_project - github_user = args.github_user - main_branch = args.main_branch - tag_name_template = args.tag_name_template - - if not github_project or not github_user or not main_branch: - print("main_branch, github_project, and github_user are required.") - print( - "Either set them in pyproject.toml/setup.cfg or specify them as command " - + "line arguments." 
- ) - return 1 - - # Let's make sure we're up-to-date and on main branch - current_branch = check_output("git rev-parse --abbrev-ref HEAD") - if current_branch != main_branch: - print( - f"Must be on the {main_branch} branch to do this; currently on {current_branch}" - ) - return 1 - - # The current branch can't be dirty - try: - subprocess.check_call("git diff --quiet --ignore-submodules HEAD".split()) - except subprocess.CalledProcessError: - print( - "Can't be \"git dirty\" when we're about to git pull. " - "Stash or commit what you're working on." - ) - return 1 - - remote_name = get_remote_name(github_user) - - # Get existing git tags from remote - check_output( - f"git pull {remote_name} {main_branch} --tags", stderr=subprocess.STDOUT - ) - - # Figure out the most recent tag details - all_tags = check_output("git tag --list --sort=-creatordate").splitlines() - if all_tags: - last_tag = all_tags[0] - last_tag_message = check_output(f'git tag -l --format="%(contents)" {last_tag}') - print(f">>> Last tag was: {last_tag}") - print(">>> Message:") - print(LINE) - print(last_tag_message) - print(LINE) - - resp = fetch_history_from_github( - github_user, github_project, last_tag, main_branch - ) - if resp["status"] != "ahead": - print(f"Nothing to deploy! 
{resp['status']}") - return - else: - first_commit = check_output("git rev-list --max-parents=0 HEAD") - resp = fetch_history_from_github(github_user, github_project, first_commit) - - bugs_referenced = set() - commits_since_tag = [] - for commit in resp["commits"]: - # Skip merge commits - if len(commit["parents"]) > 1: - continue - - # Use the first 7 characters of the commit sha - sha = commit["sha"][:7] - - # Use the first line of the commit message which is the summary and - # truncate it to 80 characters - summary = commit["commit"]["message"] - summary = summary.splitlines()[0] - summary = summary[:80] - - # Bug 1868455: While GitHub autolinking doesn't suport spaces, Bugzilla - # autolinking doesn't support hyphens. When creating a bug, we want to - # use "bug NNNNNNN" form so Bugzilla autolinking works. - if args.cmd == "make-bug": - summary = BUG_HYPHEN_PREFIX_RE.sub(r"bug \1", summary) - - bugs = find_bugs(summary) - if bugs: - bugs_referenced |= set(bugs) - - # Figure out who did the commit prefering GitHub usernames - who = commit["author"] - if not who: - who = "?" 
- else: - who = who.get("login", "?") - - commits_since_tag.append("`%s`: %s (%s)" % (sha, summary, who)) - - # Use specified tag or figure out next tag name as YYYY.MM.DD format - if args.cmd == "make-tag" and args.tag: - tag_name = args.tag - else: - tag_name = datetime.datetime.now().strftime(tag_name_template) - - # If there's already a tag, then increment the -N until we find a tag name - # that doesn't exist, yet - existing_tags = check_output(f'git tag -l "{tag_name}*"').splitlines() - if existing_tags: - tag_name_attempt = tag_name - index = 2 - while tag_name_attempt in existing_tags: - tag_name_attempt = f"{tag_name}-{index}" - index += 1 - tag_name = tag_name_attempt - - if args.cmd == "make-bug": - make_bug( - github_project, - tag_name, - commits_since_tag, - bugs_referenced, - args.bugzilla_product, - args.bugzilla_component, - ) - - elif args.cmd == "make-tag": - if args.bugzilla_product and args.bugzilla_component and not args.bug: - print( - "Bugzilla product and component are specified, but you didn't " - + "specify a bug number with --with-bug." - ) - return 1 - make_tag( - args.bug, - github_project, - github_user, - remote_name, - tag_name, - commits_since_tag, - ) - - else: - parser.print_help() - return 1 - - -if __name__ == "__main__": - sys.exit(run()) diff --git a/bin/run_migrations.sh b/bin/run_migrations.sh index 357d811f8d..9caec42bde 100755 --- a/bin/run_migrations.sh +++ b/bin/run_migrations.sh @@ -19,7 +19,7 @@ PRECMD="" # send errors to sentry. if [ -n "${SENTRY_DSN:-}" ]; then echo "SENTRY_DSN defined--enabling sentry." - PRECMD="python bin/sentry-wrap.py wrap-process --timeout=600 --" + PRECMD="sentry-wrap wrap-process --timeout=600 --" else echo "SENTRY_DSN not defined--not enabling sentry." fi diff --git a/bin/run_postdeploy.sh b/bin/run_postdeploy.sh index 5b753d470f..7fb03b703d 100755 --- a/bin/run_postdeploy.sh +++ b/bin/run_postdeploy.sh @@ -19,7 +19,7 @@ PRECMD="" # send errors to sentry. 
if [ -n "${SENTRY_DSN:-}" ]; then echo "SENTRY_DSN defined--enabling sentry." - PRECMD="python bin/sentry-wrap.py wrap-process --timeout=600 --" + PRECMD="sentry-wrap wrap-process --timeout=600 --" else echo "SENTRY_DSN not defined--not enabling sentry." fi diff --git a/bin/sentry-wrap.py b/bin/sentry-wrap.py deleted file mode 100755 index 4e648c686a..0000000000 --- a/bin/sentry-wrap.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -# Wraps a command such that if it fails, an error report is sent to the Sentry service -# specified by SENTRY_DSN in the environment. -# -# Usage: python bin/sentry-wrap.py wrap-process -- [CMD] -# Wraps a process in error-reporting Sentry goodness. -# -# Usage: python bin/sentry-wrap.py test-sentry -# Tests Sentry configuration and connection. - - -import os -import shlex -import subprocess -import sys -import time -import traceback - -import click -import sentry_sdk -from sentry_sdk import capture_exception, capture_message - - -@click.group() -def cli_main(): - pass - - -@cli_main.command() -@click.pass_context -def test_sentry(ctx): - sentry_dsn = os.environ.get("SENTRY_DSN") - - if not sentry_dsn: - click.echo("SENTRY_DSN is not defined. Exiting.") - sys.exit(1) - - sentry_sdk.init(sentry_dsn) - capture_message("Sentry test") - click.echo("Success. Check Sentry.") - - -@cli_main.command() -@click.option( - "--timeout", - default=300, - help="Timeout in seconds to wait for process before giving up.", -) -@click.argument("cmd", nargs=-1) -@click.pass_context -def wrap_process(ctx, timeout, cmd): - sentry_dsn = os.environ.get("SENTRY_DSN") - - if not sentry_dsn: - click.echo("SENTRY_DSN is not defined. 
Exiting.") - sys.exit(1) - - if not cmd: - raise click.UsageError("CMD required") - - start_time = time.time() - - sentry_sdk.init(sentry_dsn) - - cmd = " ".join(cmd) - cmd_args = shlex.split(cmd) - click.echo(f"sentry-wrap: running: {cmd_args}") - - try: - ret = subprocess.run(cmd_args, capture_output=True, timeout=timeout) - if ret.returncode != 0: - sentry_sdk.set_context( - "status", - { - "exit_code": ret.returncode, - "stdout": ret.stdout.decode("utf-8"), - "stderr": ret.stderr.decode("utf-8"), - }, - ) - capture_message(f"Command {cmd!r} failed.") - click.echo(ret.stdout.decode("utf-8"), err=True) - click.echo(ret.stderr.decode("utf-8"), err=True) - time_delta = (time.time() - start_time) / 1000 - click.echo(f"sentry-wrap: fail. {time_delta:.2f}s", err=True) - ctx.exit(1) - - else: - click.echo(ret.stdout.decode("utf-8")) - time_delta = (time.time() - start_time) / 1000 - click.echo(f"sentry-wrap: success! {time_delta:.2f}s") - - except click.exceptions.Exit: - raise - - except Exception as exc: - capture_exception(exc) - click.echo(traceback.format_exc(), err=True) - time_delta = (time.time() - start_time) / 1000 - click.echo(f"sentry-wrap: fail. {time_delta:.2f}s", err=True) - ctx.exit(1) - - -if __name__ == "__main__": - cli_main() diff --git a/bin/service-status.py b/bin/service-status.py deleted file mode 100755 index 7bd5a16174..0000000000 --- a/bin/service-status.py +++ /dev/null @@ -1,219 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -This script looks at the ``/__version__`` endpoint information and tells you -how far behind different server environments are from main tip. - -This requires Python 3.8+ to run. See help text for more. - -See https://github.com/willkg/socorro-release/#readme for details. 
- -Note: If you want to use ``pyproject.toml`` and you're using Python <3.11, this -also requires the tomli library. - -repo: https://github.com/willkg/socorro-release/ -sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870 - -""" - -import argparse -import json -import os -import sys -from urllib.parse import urlparse -from urllib.request import urlopen - - -DESCRIPTION = """ -service-status.py tells you how far behind different server environments -are from main tip. - -For help, see: https://github.com/willkg/socorro-release/ -""" - -DEFAULT_CONFIG = { - # The name of the main branch in the repository - "main_branch": "main", - # List of "label=host" for hosts that have a /__version__ to check - "hosts": [], -} - - -def get_config(): - """Generates configuration. - - This tries to pull configuration from the ``[tool.service-status]`` table - from a ``pyproject.toml`` file. - - If neither exist, then it uses defaults. - - :returns: configuration dict - - """ - my_config = dict(DEFAULT_CONFIG) - - if os.path.exists("pyproject.toml"): - if sys.version_info >= (3, 11): - import tomllib - else: - try: - import tomli as tomllib - except ImportError: - print( - "For Python <3.11, you need to install tomli to work with pyproject.toml " - + "files." - ) - tomllib = None - - if tomllib is not None: - with open("pyproject.toml", "rb") as fp: - data = tomllib.load(fp) - - config_data = data.get("tool", {}).get("service-status", {}) - if config_data: - for key, default_val in my_config.items(): - my_config[key] = config_data.get(key, default_val) - - return my_config - - -def fetch(url, is_json=True): - """Fetch data from a url - - This raises URLError on HTTP request errors. It also raises JSONDecode - errors if it's not valid JSON. 
- - """ - fp = urlopen(url, timeout=5) - data = fp.read() - if is_json: - return json.loads(data) - return data - - -def fetch_history_from_github(main_branch, user, repo, from_sha): - return fetch( - "https://api.github.com/repos/%s/%s/compare/%s...%s" - % (user, repo, from_sha, main_branch) - ) - - -class StdoutOutput: - def section(self, name): - print("") - print("%s" % name) - print("=" * len(name)) - print("") - - def row(self, *args): - template = "%-13s " * len(args) - print(" " + template % args) - - def print_delta(self, main_branch, user, repo, sha): - resp = fetch_history_from_github(main_branch, user, repo, sha) - # from pprint import pprint - # pprint(resp) - if resp["total_commits"] == 0: - self.row("", "status", "identical") - else: - self.row("", "status", "%s commits" % resp["total_commits"]) - self.row() - self.row( - "", - "https://github.com/%s/%s/compare/%s...%s" - % ( - user, - repo, - sha[:8], - main_branch, - ), - ) - self.row() - for i, commit in enumerate(resp["commits"]): - if len(commit["parents"]) > 1: - # Skip merge commits - continue - - self.row( - "", - commit["sha"][:8], - ("HEAD: " if i == 0 else "") - + "%s (%s)" - % ( - commit["commit"]["message"].splitlines()[0][:60], - (commit["author"] or {}).get("login", "?")[:10], - ), - ) - self.row() - - -def main(): - config = get_config() - - parser = argparse.ArgumentParser(description=DESCRIPTION) - - # Add items that can be configured to argparse as configuration options. - # This makes it possible to specify or override configuration with command - # line arguments. 
- for key, val in config.items(): - key_arg = key.replace("_", "-") - if isinstance(val, list): - parser.add_argument( - f"--{key_arg}", - default=val, - nargs="+", - metavar="VALUE", - help=f"override configuration {key}; defaults to {val!r}", - ) - else: - default_val = val.replace("%", "%%") - parser.add_argument( - f"--{key_arg}", - default=val, - metavar="VALUE", - help=f"override configuration {key}; defaults to {default_val!r}", - ) - - args = parser.parse_args() - - main_branch = args.main_branch - hosts = args.hosts - - out = StdoutOutput() - - if not hosts: - print("no hosts specified.") - return 1 - - current_section = "" - - for line in hosts: - parts = line.split("=", 1) - if len(parts) == 1: - service = parts[0] - env_name = "environment" - else: - env_name, service = parts - - if current_section != env_name: - out.section(env_name) - current_section = env_name - - service = service.rstrip("/") - resp = fetch(f"{service}/__version__") - commit = resp["commit"] - tag = resp.get("version") or "(none)" - - parsed = urlparse(resp["source"]) - _, user, repo = parsed.path.split("/") - service_name = repo - out.row(service_name, "version", commit, tag) - out.print_delta(main_branch, user, repo, commit) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/bin/setup_services.sh b/bin/setup_services.sh index b0caa5b5e9..8dc46e259e 100755 --- a/bin/setup_services.sh +++ b/bin/setup_services.sh @@ -18,10 +18,10 @@ set -euo pipefail /app/bin/setup_postgres.sh # Delete and create local GCS buckets -/app/socorro-cmd gcs delete "${CRASHSTORAGE_GCS_BUCKET}" -/app/socorro-cmd gcs create "${CRASHSTORAGE_GCS_BUCKET}" -/app/socorro-cmd gcs delete "${TELEMETRY_GCS_BUCKET}" -/app/socorro-cmd gcs create "${TELEMETRY_GCS_BUCKET}" +gcs-cli delete "${CRASHSTORAGE_GCS_BUCKET}" +gcs-cli create "${CRASHSTORAGE_GCS_BUCKET}" +gcs-cli delete "${TELEMETRY_GCS_BUCKET}" +gcs-cli create "${TELEMETRY_GCS_BUCKET}" # Delete and create Elasticsearch indices /app/socorro-cmd 
legacy_es delete @@ -33,8 +33,16 @@ if [ "${ELASTICSEARCH_MODE^^}" == "PREFER_NEW" ]; then fi # Delete and create Pub/Sub queues -/app/socorro-cmd pubsub delete-all -/app/socorro-cmd pubsub create-all +pubsub-cli delete-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_STANDARD_TOPIC_NAME" +pubsub-cli delete-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_PRIORITY_TOPIC_NAME" +pubsub-cli delete-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_REPROCESSING_TOPIC_NAME" + +pubsub-cli create-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_STANDARD_TOPIC_NAME" +pubsub-cli create-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_PRIORITY_TOPIC_NAME" +pubsub-cli create-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_REPROCESSING_TOPIC_NAME" +pubsub-cli create-subscription "$PUBSUB_PROJECT_ID" "$PUBSUB_STANDARD_TOPIC_NAME" "$PUBSUB_STANDARD_SUBSCRIPTION_NAME" +pubsub-cli create-subscription "$PUBSUB_PROJECT_ID" "$PUBSUB_PRIORITY_TOPIC_NAME" "$PUBSUB_PRIORITY_SUBSCRIPTION_NAME" +pubsub-cli create-subscription "$PUBSUB_PROJECT_ID" "$PUBSUB_REPROCESSING_TOPIC_NAME" "$PUBSUB_REPROCESSING_SUBSCRIPTION_NAME" # Initialize the cronrun bookkeeping for all configured jobs to success /app/webapp/manage.py cronmarksuccess all diff --git a/bin/test.sh b/bin/test.sh index 5b900634f7..8143e8ede3 100755 --- a/bin/test.sh +++ b/bin/test.sh @@ -27,18 +27,20 @@ PYTHON="$(which python)" echo ">>> wait for services to be ready" -urlwait "${DATABASE_URL}" -urlwait "${LEGACY_ELASTICSEARCH_URL}" -urlwait "http://${PUBSUB_EMULATOR_HOST}" 10 -urlwait "${STORAGE_EMULATOR_HOST}/storage/v1/b" 10 -python ./bin/waitfor.py --verbose --codes=200,404 "${SENTRY_DSN}" +waitfor --verbose --conn-only "${DATABASE_URL}" +waitfor --verbose "${LEGACY_ELASTICSEARCH_URL}" +waitfor --verbose "http://${PUBSUB_EMULATOR_HOST}" +waitfor --verbose "${STORAGE_EMULATOR_HOST}/storage/v1/b" +waitfor --verbose --codes=200,404 "${SENTRY_DSN}" # wait for this last because it's slow to start -urlwait "${ELASTICSEARCH_URL}" 30 +waitfor --verbose --timeout=30 "${ELASTICSEARCH_URL}" echo ">>> build queue 
things and db things" # Clear Pub/Sub for tests -./socorro-cmd pubsub delete-all +pubsub-cli delete-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_STANDARD_TOPIC_NAME" +pubsub-cli delete-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_PRIORITY_TOPIC_NAME" +pubsub-cli delete-topic "$PUBSUB_PROJECT_ID" "$PUBSUB_REPROCESSING_TOPIC_NAME" # Set up socorro_test db ./socorro-cmd db drop || true diff --git a/bin/waitfor.py b/bin/waitfor.py deleted file mode 100755 index 8e097b70c7..0000000000 --- a/bin/waitfor.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -Given a url, performs GET requests until it gets back an HTTP 200 or exceeds the wait -timeout. - -Usage: bin/waitfor.py [--timeout T] [--verbose] [--codes CODES] URL -""" - -import argparse -import urllib.error -import urllib.request -from urllib.parse import urlsplit -import sys -import time - - -def main(args): - parser = argparse.ArgumentParser( - description=( - "Performs GET requests against given URL until HTTP 200 or exceeds " - "wait timeout." 
- ) - ) - parser.add_argument("--verbose", action="store_true") - parser.add_argument("--timeout", type=int, default=15, help="Wait timeout") - parser.add_argument( - "--codes", - default="200", - help="Comma-separated list of valid HTTP response codes", - ) - parser.add_argument("url", help="URL to test") - - parsed = parser.parse_args(args) - - ok_codes = [int(code.strip()) for code in parsed.codes.split(",")] - - url = parsed.url - parsed_url = urlsplit(url) - if "@" in parsed_url.netloc: - netloc = parsed_url.netloc - netloc = netloc[netloc.find("@") + 1 :] - parsed_url = parsed_url._replace(netloc=netloc) - url = parsed_url.geturl() - - if parsed.verbose: - print(f"Testing {url} for {ok_codes!r} with timeout {parsed.timeout}...") - - start_time = time.time() - - last_fail = "" - while True: - try: - with urllib.request.urlopen(url, timeout=5) as resp: - if resp.code in ok_codes: - sys.exit(0) - last_fail = f"HTTP status code: {resp.code}" - except TimeoutError as error: - last_fail = f"TimeoutError: {error}" - except urllib.error.URLError as error: - if hasattr(error, "code") and error.code in ok_codes: - sys.exit(0) - last_fail = f"URLError: {error}" - - if parsed.verbose: - print(last_fail) - - time.sleep(0.5) - - delta = time.time() - start_time - if delta > parsed.timeout: - print(f"Failed: {last_fail}, elapsed: {delta:.2f}s") - sys.exit(1) - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/docker-compose.yml b/docker-compose.yml index a852257645..236ba09104 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,6 +12,7 @@ services: depends_on: - fakesentry - statsd + - gcs-emulator - pubsub - postgresql - legacy-elasticsearch diff --git a/requirements.in b/requirements.in index b82a9a2f59..2926d1fc79 100644 --- a/requirements.in +++ b/requirements.in @@ -76,3 +76,7 @@ django==4.2.16 python-dateutil # via elasticsearch-dsl==0.0.11 six # via elasticsearch-dsl==0.0.11 urllib3>=1.8, <2.0 # via elasticsearch==1.9.0 + +# 
Mozilla obs-team libraries that are published to GAR instead of pypi +--extra-index-url https://us-python.pkg.dev/moz-fx-cavendish-prod/cavendish-prod-python/simple/ +obs-common==2024.11.13 diff --git a/requirements.txt b/requirements.txt index 5f91751236..b972f18926 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,8 @@ # # pip-compile --generate-hashes --strip-extras # +--extra-index-url https://us-python.pkg.dev/moz-fx-cavendish-prod/cavendish-prod-python/simple/ + alabaster==0.7.16 \ --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 @@ -199,6 +201,7 @@ click==8.1.7 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via # -r requirements.in + # obs-common # pip-tools contextlib2==21.6.0 \ --hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \ @@ -355,11 +358,15 @@ google-cloud-core==2.4.1 \ google-cloud-pubsub==2.26.1 \ --hash=sha256:932d4434d86af25673082b48d54b318a448d1a7cd718404c33bf008ae9a8bb22 \ --hash=sha256:d46a302c2c7a008e399f4c04b4be6341d8aa7a537a25810ec8d38a5c125f816d - # via -r requirements.in + # via + # -r requirements.in + # obs-common google-cloud-storage==2.18.2 \ --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via -r requirements.in + # via + # -r requirements.in + # obs-common google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ @@ -810,6 +817,9 @@ oauth2client==4.1.3 \ --hash=sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac \ --hash=sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6 # via -r requirements.in +obs-common==2024.11.13 \ + 
--hash=sha256:8805e99d864bab2410b811d46abe62295ad6b9f2def33d01dce40cb7cc165349 + # via -r requirements.in opentelemetry-api==1.27.0 \ --hash=sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 \ --hash=sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342 @@ -1097,6 +1107,7 @@ requests==2.32.3 \ # google-api-core # google-cloud-storage # mozilla-django-oidc + # obs-common # requests-mock # sphinx requests-mock==1.12.1 \ @@ -1242,6 +1253,7 @@ sentry-sdk==2.17.0 \ # via # -r requirements.in # fillmore + # obs-common six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 diff --git a/socorro-cmd b/socorro-cmd index d2337a648e..8c1ea4f533 100755 --- a/socorro-cmd +++ b/socorro-cmd @@ -103,8 +103,6 @@ COMMANDS = [ "db": import_path("socorro.scripts.db.db_group"), "es": import_path("es_cli.main"), "legacy_es": import_path("legacy_es_cli.main"), - "pubsub": import_path("pubsub_cli.main"), - "gcs": import_path("gcs_cli.main"), }, ), Group( diff --git a/socorro/tests/test_gcs_cli.py b/socorro/tests/test_gcs_cli.py deleted file mode 100644 index 38db2b4c70..0000000000 --- a/socorro/tests/test_gcs_cli.py +++ /dev/null @@ -1,56 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
- -from uuid import uuid4 - -from click.testing import CliRunner - -from gcs_cli import gcs_group - - -def test_it_runs(): - """Test whether the module loads and spits out help.""" - runner = CliRunner() - result = runner.invoke(gcs_group, ["--help"]) - assert result.exit_code == 0 - - -def test_upload_file_to_root(gcs_helper, tmp_path): - """Test uploading one file to a bucket root.""" - bucket = gcs_helper.create_bucket("test").name - path = tmp_path / uuid4().hex - path.write_text(path.name) - result = CliRunner().invoke( - gcs_group, ["upload", str(path.absolute()), f"gs://{bucket}"] - ) - assert result.exit_code == 0 - assert gcs_helper.download(bucket, path.name) == path.name.encode("utf-8") - - -def test_upload_file_to_dir(gcs_helper, tmp_path): - """Test uploading one file to a directory inside a bucket.""" - bucket = gcs_helper.create_bucket("test").name - path = tmp_path / uuid4().hex - path.write_text(path.name) - result = CliRunner().invoke( - gcs_group, ["upload", str(path.absolute()), f"gs://{bucket}/{path.name}/"] - ) - assert result.exit_code == 0 - assert gcs_helper.download(bucket, f"{path.name}/{path.name}") == path.name.encode( - "utf-8" - ) - - -def test_upload_dir_to_dir(gcs_helper, tmp_path): - """Test uploading a whole directory to a directory inside a bucket.""" - bucket = gcs_helper.create_bucket("test").name - path = tmp_path / uuid4().hex - path.write_text(path.name) - result = CliRunner().invoke( - gcs_group, ["upload", str(tmp_path.absolute()), f"gs://{bucket}/{path.name}"] - ) - assert result.exit_code == 0 - assert gcs_helper.download(bucket, f"{path.name}/{path.name}") == path.name.encode( - "utf-8" - ) diff --git a/socorro/tests/test_pubsub_cli.py b/socorro/tests/test_pubsub_cli.py deleted file mode 100644 index 221fc0eeb6..0000000000 --- a/socorro/tests/test_pubsub_cli.py +++ /dev/null @@ -1,14 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -from click.testing import CliRunner - -from pubsub_cli import pubsub_group - - -def test_it_runs(): - """Test whether the module loads and spits out help.""" - runner = CliRunner() - result = runner.invoke(pubsub_group, ["--help"]) - assert result.exit_code == 0