From cfd960ee372106381b3c815afae51bd06281c833 Mon Sep 17 00:00:00 2001 From: Daniel Thorn Date: Mon, 4 Nov 2024 09:21:03 -0800 Subject: [PATCH] OBS-144: update from socorro-release to obs-common (#1100) --- bin/gcs_cli.py | 130 ------------ bin/license-check.py | 152 ------------- bin/pubsub_cli.py | 150 ------------- bin/release.py | 483 ------------------------------------------ bin/run_lint.sh | 6 +- bin/run_setup.sh | 14 +- bin/service-status.py | 222 ------------------- docs/dev.rst | 4 +- pyproject.toml | 2 - requirements.in | 3 + requirements.txt | 16 +- tests/test_bin.py | 32 --- 12 files changed, 29 insertions(+), 1185 deletions(-) delete mode 100755 bin/gcs_cli.py delete mode 100755 bin/license-check.py delete mode 100755 bin/pubsub_cli.py delete mode 100755 bin/release.py delete mode 100755 bin/service-status.py delete mode 100644 tests/test_bin.py diff --git a/bin/gcs_cli.py b/bin/gcs_cli.py deleted file mode 100755 index 795fecd5..00000000 --- a/bin/gcs_cli.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -# Manipulate emulated GCS storage. - -# Usage: ./bin/gcs_cli.py CMD - -import os - -import click - -from google.auth.credentials import AnonymousCredentials -from google.cloud import storage -from google.cloud.exceptions import NotFound - - -def get_endpoint_url(bucket_name): - base_url = os.environ["STORAGE_EMULATOR_HOST"] - return f"{base_url}/storage/v1/b/{bucket_name}" - - -def get_client(): - return storage.Client(credentials=AnonymousCredentials(), project="test") - - -@click.group() -def gcs_group(): - """Local dev environment GCS manipulation script""" - - -@gcs_group.command("create") -@click.argument("bucket_name") -def create_bucket(bucket_name): - """Creates a bucket - - Specify BUCKET_NAME. - - """ - # https://github.com/fsouza/fake-gcs-server/blob/0c31d1573c14912fc58ae68118f9c9ece266756a/README.md?plain=1#L47 - endpoint_url = get_endpoint_url(bucket_name) - - client = get_client() - - try: - client.get_bucket(bucket_name) - click.echo(f"GCS bucket {bucket_name!r} exists in {endpoint_url!r}.") - except NotFound: - client.create_bucket(bucket_name) - click.echo(f"GCS bucket {bucket_name!r} in {endpoint_url!r} created.") - - -@gcs_group.command("delete") -@click.argument("bucket_name") -def delete_bucket(bucket_name): - """Deletes a bucket - - Specify BUCKET_NAME. 
- - """ - # https://github.com/fsouza/fake-gcs-server/blob/0c31d1573c14912fc58ae68118f9c9ece266756a/README.md?plain=1#L47 - endpoint_url = get_endpoint_url(bucket_name) - - client = get_client() - - bucket = None - - try: - bucket = client.get_bucket(bucket_name) - except NotFound: - click.echo(f"GCS bucket {bucket_name!r} at {endpoint_url!r} does not exist.") - return - - # Delete any objects in the bucket - blobs = client.list_blobs(bucket_name) - for blob in blobs: - click.echo(f"Deleting GCS object {blob.name}...") - blob.delete() - - # Then delete the bucket - bucket.delete() - click.echo(f"GCS bucket {bucket_name!r} at {endpoint_url!r} deleted.") - - -@gcs_group.command("list_buckets") -@click.option("--details/--no-details", default=True, type=bool, help="With details") -def list_buckets(details): - """List GCS buckets""" - - client = get_client() - - buckets = client.list_buckets() - for bucket in buckets: - if details: - # https://cloud.google.com/storage/docs/json_api/v1/buckets#resource-representations - click.echo(f"{bucket.name}\t{bucket.time_created}") - else: - click.echo(f"{bucket.name}") - - -@gcs_group.command("list_objects") -@click.option("--details/--no-details", default=True, type=bool, help="With details") -@click.argument("bucket_name") -def list_objects(bucket_name, details): - """List contents of a bucket""" - - client = get_client() - - try: - client.get_bucket(bucket_name) - except NotFound: - click.echo(f"GCS bucket {bucket_name!r} does not exist.") - return - - blobs = list(client.list_blobs(bucket_name)) - if blobs: - for blob in blobs: - # https://cloud.google.com/storage/docs/json_api/v1/objects#resource-representations - if details: - click.echo(f"{blob.name}\t{blob.size}\t{blob.updated}") - else: - click.echo(f"{blob.name}") - else: - click.echo("No objects in bucket.") - - -if __name__ == "__main__": - gcs_group() diff --git a/bin/license-check.py b/bin/license-check.py deleted file mode 100755 index 05b75528..00000000 --- a/bin/license-check.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -This script checks files for license headers. - -This requires Python 3.8+ to run. - -See https://github.com/willkg/socorro-release/#readme for details. - -repo: https://github.com/willkg/socorro-release/ -sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870 - -""" - -import argparse -import pathlib -import subprocess -import sys - - -DESCRIPTION = ( - "Checks files in specified directory for license headers. " - + "If you don't specify a target, it'll check all files in \"git ls-files\"." -) - -# From https://www.mozilla.org/en-US/MPL/2.0/ -MPLV2 = [ - "This Source Code Form is subject to the terms of the Mozilla Public", - "License, v. 2.0. If a copy of the MPL was not distributed with this", - "file, You can obtain one at https://mozilla.org/MPL/2.0/.", -] - - -LANGUAGE_DATA = {".py": {"comment": ("#",)}} - - -def is_code_file(path: pathlib.Path): - """Determines whether the file is a code file we need to check. - - :param path: the Path for the file - - :returns: True if it's a code file to check, False otherwise. - - """ - if not path.is_file(): - return False - ending: pathlib.Path = path.suffix - return ending in LANGUAGE_DATA - - -def has_license_header(path: pathlib.Path): - """Determines if file at path has an MPLv2 license header. 
- - :param path: the Path for the file - - :returns: True if it does, False if it doesn't. - - """ - ending: pathlib.Path = path.suffix - comment_indicators = LANGUAGE_DATA[ending]["comment"] - - header = [] - with open(path, "r") as fp: - firstline = True - for line in fp.readlines(): - if firstline and line.startswith("#!"): - firstline = False - continue - - line = line.strip() - # NOTE(willkg): this doesn't handle multiline comments like in C++ - for indicator in comment_indicators: - line = line.strip(indicator) - line = line.strip() - - # Skip blank lines - if not line: - continue - - header.append(line) - if len(header) == len(MPLV2): - if header[: len(MPLV2)] == MPLV2: - return True - else: - break - - return False - - -def main(args): - parser = argparse.ArgumentParser(description=DESCRIPTION) - parser.add_argument( - "-l", "--file-only", action="store_true", help="print files only" - ) - parser.add_argument("--verbose", action="store_true", help="verbose output") - parser.add_argument("target", help="file or directory tree to check", nargs="?") - - parsed = parser.parse_args(args) - - if parsed.target: - target = pathlib.Path(parsed.target) - if not target.exists(): - if not parsed.file_only: - print(f"Not a valid file or directory: {target}") - return 1 - - if target.is_file(): - targets = [target] - - elif target.is_dir(): - targets = list(target.rglob("*")) - - else: - ret = subprocess.check_output(["git", "ls-files"]) - targets = [ - pathlib.Path(target.strip()) for target in ret.decode("utf-8").splitlines() - ] - - missing_headers = 0 - - # Iterate through all the files in this target directory - for path in targets: - if parsed.verbose: - print(f"Checking {path}") - if is_code_file(path) and not has_license_header(path): - missing_headers += 1 - if parsed.file_only: - print(str(path)) - else: - print(f"File {path} does not have license header.") - - if missing_headers > 0: - if not parsed.file_only: - print(f"Files with missing headers: {missing_headers}") - print("") - print("Add this:") - print("") - print("\n".join(MPLV2)) - return 1 - - if not parsed.file_only: - print("No files missing headers.") - - return 0 - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/bin/pubsub_cli.py b/bin/pubsub_cli.py deleted file mode 100755 index ea34c42b..00000000 --- a/bin/pubsub_cli.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -# Pub/Sub manipulation script. -# -# Note: Run this in the base container which has access to Pub/Sub. 
-# -# Usage: ./bin/pubsub_cli.py [SUBCOMMAND] - -import click -from google.cloud import pubsub_v1 -from google.api_core.exceptions import AlreadyExists, NotFound - - -@click.group() -def pubsub_group(): - """Local dev environment Pub/Sub emulator manipulation script.""" - - -@pubsub_group.command("list_topics") -@click.argument("project_id") -@click.pass_context -def list_topics(ctx, project_id): - """List topics for this project.""" - click.echo(f"Listing topics in project {project_id}.") - publisher = pubsub_v1.PublisherClient() - - for topic in publisher.list_topics(project=f"projects/{project_id}"): - click.echo(topic.name) - - -@pubsub_group.command("list_subscriptions") -@click.argument("project_id") -@click.argument("topic_name") -@click.pass_context -def list_subscriptions(ctx, project_id, topic_name): - """List subscriptions for a given topic.""" - click.echo(f"Listing subscriptions in topic {topic_name!r}:") - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - for subscription in publisher.list_topic_subscriptions(topic=topic_path): - click.echo(subscription) - - -@pubsub_group.command("create_topic") -@click.argument("project_id") -@click.argument("topic_name") -@click.pass_context -def create_topic(ctx, project_id, topic_name): - """Create topic.""" - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - try: - publisher.create_topic(name=topic_path) - click.echo(f"Topic created: {topic_path}") - except AlreadyExists: - click.echo("Topic already created.") - - -@pubsub_group.command("create_subscription") -@click.argument("project_id") -@click.argument("topic_name") -@click.argument("subscription_name") -@click.pass_context -def create_subscription(ctx, project_id, topic_name, subscription_name): - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) - try: - subscriber.create_subscription(name=subscription_path, topic=topic_path) - click.echo(f"Subscription created: {subscription_path}") - except AlreadyExists: - click.echo("Subscription already created.") - - -@pubsub_group.command("delete_topic") -@click.argument("project_id") -@click.argument("topic_name") -@click.pass_context -def delete_topic(ctx, project_id, topic_name): - """Delete a topic.""" - publisher = pubsub_v1.PublisherClient() - subscriber = pubsub_v1.SubscriberClient() - topic_path = publisher.topic_path(project_id, topic_name) - - # Delete all subscriptions - for subscription in publisher.list_topic_subscriptions(topic=topic_path): - click.echo(f"Deleting {subscription} ...") - subscriber.delete_subscription(subscription=subscription) - - # Delete topic - try: - publisher.delete_topic(topic=topic_path) - click.echo(f"Topic deleted: {topic_name}") - except NotFound: - click.echo(f"Topic {topic_name} does not exist.") - - -@pubsub_group.command("publish") -@click.argument("project_id") -@click.argument("topic_name") -@click.argument("crash_id") -@click.pass_context -def publish(ctx, project_id, topic_name, crash_id): - """Publish crash_id to a given topic.""" - click.echo(f"Publishing crash_id to topic {topic_name!r}:") - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - future = publisher.publish(topic_path, crash_id.encode("utf-8"), timeout=5) - click.echo(future.result()) - - 
-@pubsub_group.command("pull") -@click.argument("project_id") -@click.argument("subscription_name") -@click.option("--ack/--no-ack", is_flag=True, default=False) -@click.option("--max-messages", default=1, type=int) -@click.pass_context -def pull(ctx, project_id, subscription_name, ack, max_messages): - """Pull crash id from a given subscription.""" - click.echo(f"Pulling crash id from subscription {subscription_name!r}:") - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) - - response = subscriber.pull( - subscription=subscription_path, max_messages=max_messages - ) - if not response.received_messages: - return - - ack_ids = [] - for msg in response.received_messages: - click.echo(f"crash id: {msg.message.data}") - ack_ids.append(msg.ack_id) - - if ack: - # Acknowledges the received messages so they will not be sent again. - subscriber.acknowledge(subscription=subscription_path, ack_ids=ack_ids) - - -if __name__ == "__main__": - pubsub_group() diff --git a/bin/release.py b/bin/release.py deleted file mode 100755 index 6912035d..00000000 --- a/bin/release.py +++ /dev/null @@ -1,483 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -This script handles releases for this project. - -This has two subcommands: ``make-bug`` and ``make-tag``. See the help text for -both. - -This requires Python 3.8+ to run. - -Note: If you want to use ``pyproject.toml`` and you're using Python <3.11, this -also requires the tomli library. - -See https://github.com/willkg/socorro-release/#readme for details. - -repo: https://github.com/willkg/socorro-release/ -sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870 - -""" - -import argparse -import configparser -import datetime -import json -import os -import re -import shlex -import subprocess -import sys -from urllib.parse import urlencode -from urllib.request import urlopen - - -DESCRIPTION = """ -release.py makes it easier to create deploy bugs and push tags to trigger -deploys. - -For help, see: https://github.com/willkg/socorro-release/ -""" - -GITHUB_API = "https://api.github.com/" -BZ_CREATE_URL = "https://bugzilla.mozilla.org/enter_bug.cgi" -BZ_BUG_JSON_URL = "https://bugzilla.mozilla.org/rest/bug/" - -DEFAULT_CONFIG = { - # Bugzilla product and component to write new bugs in - "bugzilla_product": "", - "bugzilla_component": "", - # GitHub user and project name - "github_user": "", - "github_project": "", - # The name of the main branch - "main_branch": "", - # The tag structure using datetime formatting markers - "tag_name_template": "%Y.%m.%d", -} - -LINE = "=" * 80 - -# Recognize "bug-NNNNNNN", "bug NNNNNNN", and multi-bug variants -BUG_RE = re.compile(r"\bbug(?:s?:?\s*|-)([\d\s,\+&#and]+)\b", re.IGNORECASE) - -# Recognize "bug-NNNNNNN" -BUG_HYPHEN_PREFIX_RE = re.compile(r"bug-([\d]+)", re.IGNORECASE) - - -def get_config(): - """Generates configuration. - - This tries to pull configuration from: - - 1. the ``[tool.release]`` table from a ``pyproject.toml`` file, OR - 2. the ``[tool:release]`` section of a ``setup.cfg`` file - - If neither exist, then it uses defaults. 
- - :returns: configuration dict - - """ - my_config = dict(DEFAULT_CONFIG) - - if os.path.exists("pyproject.toml"): - if sys.version_info >= (3, 11): - import tomllib - else: - try: - import tomli as tomllib - except ImportError: - print( - "For Python <3.11, you need to install tomli to work with pyproject.toml " - + "files." - ) - tomllib = None - - if tomllib is not None: - with open("pyproject.toml", "rb") as fp: - data = tomllib.load(fp) - - config_data = data.get("tool", {}).get("release", {}) - if config_data: - for key, default_val in my_config.items(): - my_config[key] = config_data.get(key, default_val) - return my_config - - if os.path.exists("setup.cfg"): - config = configparser.ConfigParser() - config.read("setup.cfg") - - if "tool:release" in config: - config = config["tool:release"] - for key, default_val in my_config.items(): - my_config[key] = config.get(key, default_val) - - return my_config - - return my_config - - -def find_bugs(line): - """Returns all the bug numbers from the line. - - >>> get_bug_numbers("some line") - [] - >>> get_bug_numbers("bug-1111111: some line") - ["1111111"] - >>> get_bug_numbers("bug 1111111, 2222222: some line") - ["1111111", "2222222"] - - """ - matches = BUG_RE.findall(line) - if not matches: - return [] - bugs = [] - for match in matches: - for part in re.findall(r"\d+", match): - if part: - bugs.append(part) - return bugs - - -def fetch(url, is_json=True): - """Fetch data from a url - - This raises URLError on HTTP request errors. It also raises JSONDecode - errors if it's not valid JSON. - - """ - fp = urlopen(url) - data = fp.read() - if is_json: - return json.loads(data) - return data - - -def fetch_history_from_github(owner, repo, from_rev, main_branch): - url = f"{GITHUB_API}repos/{owner}/{repo}/compare/{from_rev}...{main_branch}" - return fetch(url) - - -def check_output(cmdline, **kwargs): - args = shlex.split(cmdline) - return subprocess.check_output(args, **kwargs).decode("utf-8").strip() - - -def get_remote_name(github_user): - """Figures out the right remote to use - - People name the git remote differently, so this figures out which one to - use. - - :arg str github_user: the github user for the remote name to use - - :returns: the name of the remote - - :raises Exception: if it can't figure out the remote name for the specified - user - - """ - # Figure out remote to push tag to - remote_output = check_output("git remote -v") - - def check_ssh(github_user, remote_url): - return f":{github_user}/" in remote_url - - def check_https(github_user, remote_url): - return f"/{github_user}/" in remote_url - - for line in remote_output.splitlines(): - line = line.split("\t") - if check_ssh(github_user, line[1]) or check_https(github_user, line[1]): - return line[0] - - raise Exception(f"Can't figure out remote name for {github_user}.") - - -def make_tag( - bug_number, - github_project, - github_user, - remote_name, - tag_name, - commits_since_tag, -): - """Tags a release.""" - if bug_number: - resp = fetch(BZ_BUG_JSON_URL + bug_number, is_json=True) - bug_summary = resp["bugs"][0]["summary"] - - input(f">>> Using bug {bug_number}: {bug_summary}. Correct? 
Ctrl-c to cancel") - - message = ( - f"Tag {tag_name} (bug #{bug_number})\n\n" - + "\n".join(commits_since_tag) - + f"\n\nDeploy bug #{bug_number}" - ) - else: - message = f"Tag {tag_name}\n\n" + "\n".join(commits_since_tag) - - # Print out new tag information - print("") - print(">>> New tag: %s" % tag_name) - print(">>> Tag message:") - print(LINE) - print(message) - print(LINE) - - # Create tag - input(f">>> Ready to tag {tag_name}? Ctrl-c to cancel") - print("") - print(">>> Creating tag...") - subprocess.check_call(["git", "tag", "-s", tag_name, "-m", message]) - - # Push tag - input(f">>> Ready to push to remote {remote_name}? Ctrl-c to cancel") - print("") - print(">>> Pushing...") - subprocess.check_call(["git", "push", "--tags", remote_name, tag_name]) - - if bug_number: - # Show url to tag information on GitHub for bug comment - tag_url = ( - f"https://github.com/{github_user}/{github_project}/releases/tag/{tag_name}" - ) - print("") - print(f">>> Copy and paste this tag url into bug #{bug_number}.") - print(">>> %<-----------------------------------------------") - print(f"{tag_url}") - print(">>> %<-----------------------------------------------") - - -def make_bug( - github_project, - tag_name, - commits_since_tag, - bugs_referenced, - bugzilla_product, - bugzilla_component, -): - """Creates a bug.""" - summary = f"{github_project} deploy: {tag_name}" - print(">>> Creating deploy bug...") - print(">>> Summary") - print(summary) - print() - - description = [ - f"We want to do a deploy for `{github_project}` tagged `{tag_name}`.", - "", - "It consists of the following commits:", - "", - ] - description.extend(commits_since_tag) - if bugs_referenced: - description.append("") - description.append("Bugs referenced:") - description.append("") - for bug in sorted(bugs_referenced): - description.append(f"* bug #{bug}") - description = "\n".join(description) - - print(">>> Description") - print(description) - print() - - if bugzilla_product: - bz_params = { - "priority": "P2", - "bug_type": "task", - "comment": description, - "form_name": "enter_bug", - "short_desc": summary, - } - - bz_params["product"] = bugzilla_product - if bugzilla_component: - bz_params["component"] = bugzilla_component - - bugzilla_link = BZ_CREATE_URL + "?" + urlencode(bz_params) - print(">>> Link to create bug (may not work if it's sufficiently long)") - print(bugzilla_link) - - -def run(): - config = get_config() - - parser = argparse.ArgumentParser(description=DESCRIPTION) - - # Add items that can be configured to argparse as configuration options. - # This makes it possible to specify or override configuration with command - # line arguments. - for key, val in config.items(): - key_arg = key.replace("_", "-") - default_val = val.replace("%", "%%") - parser.add_argument( - f"--{key_arg}", - default=val, - help=f"override configuration {key}; defaults to {default_val!r}", - ) - - subparsers = parser.add_subparsers(dest="cmd") - subparsers.required = True - - subparsers.add_parser("make-bug", help="Make a deploy bug") - make_tag_parser = subparsers.add_parser("make-tag", help="Make a tag and push it") - make_tag_parser.add_argument( - "--with-bug", dest="bug", help="Bug for this deploy if any." 
- ) - make_tag_parser.add_argument( - "--with-tag", - dest="tag", - help="Tag to use; defaults to figuring out the tag using tag_name_template.", - ) - - args = parser.parse_args() - - github_project = args.github_project - github_user = args.github_user - main_branch = args.main_branch - tag_name_template = args.tag_name_template - - if not github_project or not github_user or not main_branch: - print("main_branch, github_project, and github_user are required.") - print( - "Either set them in pyproject.toml/setup.cfg or specify them as command " - + "line arguments." - ) - return 1 - - # Let's make sure we're up-to-date and on main branch - current_branch = check_output("git rev-parse --abbrev-ref HEAD") - if current_branch != main_branch: - print( - f"Must be on the {main_branch} branch to do this; currently on {current_branch}" - ) - return 1 - - # The current branch can't be dirty - try: - subprocess.check_call("git diff --quiet --ignore-submodules HEAD".split()) - except subprocess.CalledProcessError: - print( - "Can't be \"git dirty\" when we're about to git pull. " - "Stash or commit what you're working on." - ) - return 1 - - remote_name = get_remote_name(github_user) - - # Get existing git tags from remote - check_output( - f"git pull {remote_name} {main_branch} --tags", stderr=subprocess.STDOUT - ) - - # Figure out the most recent tag details - all_tags = check_output("git tag --list --sort=-creatordate").splitlines() - if all_tags: - last_tag = all_tags[0] - last_tag_message = check_output(f'git tag -l --format="%(contents)" {last_tag}') - print(f">>> Last tag was: {last_tag}") - print(">>> Message:") - print(LINE) - print(last_tag_message) - print(LINE) - - resp = fetch_history_from_github( - github_user, github_project, last_tag, main_branch - ) - if resp["status"] != "ahead": - print(f"Nothing to deploy! {resp['status']}") - return - else: - first_commit = check_output("git rev-list --max-parents=0 HEAD") - resp = fetch_history_from_github(github_user, github_project, first_commit) - - bugs_referenced = set() - commits_since_tag = [] - for commit in resp["commits"]: - # Skip merge commits - if len(commit["parents"]) > 1: - continue - - # Use the first 7 characters of the commit sha - sha = commit["sha"][:7] - - # Use the first line of the commit message which is the summary and - # truncate it to 80 characters - summary = commit["commit"]["message"] - summary = summary.splitlines()[0] - summary = summary[:80] - - # Bug 1868455: While GitHub autolinking doesn't suport spaces, Bugzilla - # autolinking doesn't support hyphens. When creating a bug, we want to - # use "bug NNNNNNN" form so Bugzilla autolinking works. - if args.cmd == "make-bug": - summary = BUG_HYPHEN_PREFIX_RE.sub(r"bug \1", summary) - - bugs = find_bugs(summary) - if bugs: - bugs_referenced |= set(bugs) - - # Figure out who did the commit prefering GitHub usernames - who = commit["author"] - if not who: - who = "?" 
- else: - who = who.get("login", "?") - - commits_since_tag.append("`%s`: %s (%s)" % (sha, summary, who)) - - # Use specified tag or figure out next tag name as YYYY.MM.DD format - if args.cmd == "make-tag" and args.tag: - tag_name = args.tag - else: - tag_name = datetime.datetime.now().strftime(tag_name_template) - - # If there's already a tag, then increment the -N until we find a tag name - # that doesn't exist, yet - existing_tags = check_output(f'git tag -l "{tag_name}*"').splitlines() - if existing_tags: - tag_name_attempt = tag_name - index = 2 - while tag_name_attempt in existing_tags: - tag_name_attempt = f"{tag_name}-{index}" - index += 1 - tag_name = tag_name_attempt - - if args.cmd == "make-bug": - make_bug( - github_project, - tag_name, - commits_since_tag, - bugs_referenced, - args.bugzilla_product, - args.bugzilla_component, - ) - - elif args.cmd == "make-tag": - if args.bugzilla_product and args.bugzilla_component and not args.bug: - print( - "Bugzilla product and component are specified, but you didn't " - + "specify a bug number with --with-bug." - ) - return 1 - make_tag( - args.bug, - github_project, - github_user, - remote_name, - tag_name, - commits_since_tag, - ) - - else: - parser.print_help() - return 1 - - -if __name__ == "__main__": - sys.exit(run()) diff --git a/bin/run_lint.sh b/bin/run_lint.sh index 9ef8eaea..08070a6a 100755 --- a/bin/run_lint.sh +++ b/bin/run_lint.sh @@ -28,12 +28,12 @@ else echo ">>> license check (${PYTHON_VERSION})" if [[ -d ".git" ]]; then - # If the .git directory exists, we can let license-check.py do + # If the .git directory exists, we can let license-check do # git ls-files. - python bin/license-check.py + license-check else # The .git directory doesn't exist, so run it on all the Python # files in the tree. - python bin/license-check.py . + license-check . fi fi diff --git a/bin/run_setup.sh b/bin/run_setup.sh index 9432aefe..ad01786d 100755 --- a/bin/run_setup.sh +++ b/bin/run_setup.sh @@ -13,12 +13,12 @@ set -euo pipefail echo "Delete and create GCS bucket..." -python ./bin/gcs_cli.py delete "${CRASHMOVER_CRASHSTORAGE_BUCKET_NAME}" -python ./bin/gcs_cli.py create "${CRASHMOVER_CRASHSTORAGE_BUCKET_NAME}" -python ./bin/gcs_cli.py list_buckets +gcs-cli delete "${CRASHMOVER_CRASHSTORAGE_BUCKET_NAME}" +gcs-cli create "${CRASHMOVER_CRASHSTORAGE_BUCKET_NAME}" +gcs-cli list_buckets echo "Delete and create Pub/Sub topic..." 
-python ./bin/pubsub_cli.py delete_topic "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" "${CRASHMOVER_CRASHPUBLISH_TOPIC_NAME}" -python ./bin/pubsub_cli.py create_topic "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" "${CRASHMOVER_CRASHPUBLISH_TOPIC_NAME}" -python ./bin/pubsub_cli.py create_subscription "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" "${CRASHMOVER_CRASHPUBLISH_TOPIC_NAME}" "${CRASHMOVER_CRASHPUBLISH_SUBSCRIPTION_NAME}" -python ./bin/pubsub_cli.py list_topics "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" +pubsub-cli delete_topic "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" "${CRASHMOVER_CRASHPUBLISH_TOPIC_NAME}" +pubsub-cli create_topic "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" "${CRASHMOVER_CRASHPUBLISH_TOPIC_NAME}" +pubsub-cli create_subscription "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" "${CRASHMOVER_CRASHPUBLISH_TOPIC_NAME}" "${CRASHMOVER_CRASHPUBLISH_SUBSCRIPTION_NAME}" +pubsub-cli list_topics "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}" diff --git a/bin/service-status.py b/bin/service-status.py deleted file mode 100755 index 0985ed50..00000000 --- a/bin/service-status.py +++ /dev/null @@ -1,222 +0,0 @@ -#!/usr/bin/env python - -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -""" -This script looks at the ``/__version__`` endpoint information and tells you -how far behind different server environments are from main tip. - -This requires Python 3.8+ to run. See help text for more. - -See https://github.com/willkg/socorro-release/#readme for details. - -Note: If you want to use ``pyproject.toml`` and you're using Python <3.11, this -also requires the tomli library. - -repo: https://github.com/willkg/socorro-release/ -sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870 - -""" - -import argparse -import json -import os -import sys -from urllib.parse import urlparse -from urllib.request import urlopen - - -DESCRIPTION = """ -service-status.py tells you how far behind different server environments -are from main tip. - -For help, see: https://github.com/willkg/socorro-release/ -""" - -DEFAULT_CONFIG = { - # The name of the main branch in the repository - "main_branch": "main", - # List of "label=host" for hosts that have a /__version__ to check - "hosts": [], -} - - -def get_config(): - """Generates configuration. - - This tries to pull configuration from the ``[tool.service-status]`` table - from a ``pyproject.toml`` file. - - If neither exist, then it uses defaults. - - :returns: configuration dict - - """ - my_config = dict(DEFAULT_CONFIG) - - if os.path.exists("pyproject.toml"): - if sys.version_info >= (3, 11): - import tomllib - else: - try: - import tomli as tomllib - except ImportError: - print( - "For Python <3.11, you need to install tomli to work with pyproject.toml " - + "files." - ) - tomllib = None - - if tomllib is not None: - with open("pyproject.toml", "rb") as fp: - data = tomllib.load(fp) - - config_data = data.get("tool", {}).get("service-status", {}) - if config_data: - for key, default_val in my_config.items(): - my_config[key] = config_data.get(key, default_val) - - return my_config - - -def fetch(url, is_json=True): - """Fetch data from a url - - This raises URLError on HTTP request errors. It also raises JSONDecode - errors if it's not valid JSON. 
- - """ - if not url.startswith(("http:", "https:")): - raise ValueError("URL must start with 'http:' or 'https:'") - # NOTE(willkg): ruff S310 can't determine whether we've validated the url or not - fp = urlopen(url, timeout=5) # noqa: S310 - data = fp.read() - if is_json: - return json.loads(data) - return data - - -def fetch_history_from_github(main_branch, user, repo, from_sha): - return fetch( - "https://api.github.com/repos/%s/%s/compare/%s...%s" - % (user, repo, from_sha, main_branch) - ) - - -class StdoutOutput: - def section(self, name): - print("") - print("%s" % name) - print("=" * len(name)) - print("") - - def row(self, *args): - template = "%-13s " * len(args) - print(" " + template % args) - - def print_delta(self, main_branch, user, repo, sha): - resp = fetch_history_from_github(main_branch, user, repo, sha) - # from pprint import pprint - # pprint(resp) - if resp["total_commits"] == 0: - self.row("", "status", "identical") - else: - self.row("", "status", "%s commits" % resp["total_commits"]) - self.row() - self.row( - "", - "https://github.com/%s/%s/compare/%s...%s" - % ( - user, - repo, - sha[:8], - main_branch, - ), - ) - self.row() - for i, commit in enumerate(resp["commits"]): - if len(commit["parents"]) > 1: - # Skip merge commits - continue - - self.row( - "", - commit["sha"][:8], - ("HEAD: " if i == 0 else "") - + "%s (%s)" - % ( - commit["commit"]["message"].splitlines()[0][:60], - (commit["author"] or {}).get("login", "?")[:10], - ), - ) - self.row() - - -def main(): - config = get_config() - - parser = argparse.ArgumentParser(description=DESCRIPTION) - - # Add items that can be configured to argparse as configuration options. - # This makes it possible to specify or override configuration with command - # line arguments. - for key, val in config.items(): - key_arg = key.replace("_", "-") - if isinstance(val, list): - parser.add_argument( - f"--{key_arg}", - default=val, - nargs="+", - metavar="VALUE", - help=f"override configuration {key}; defaults to {val!r}", - ) - else: - default_val = val.replace("%", "%%") - parser.add_argument( - f"--{key_arg}", - default=val, - metavar="VALUE", - help=f"override configuration {key}; defaults to {default_val!r}", - ) - - args = parser.parse_args() - - main_branch = args.main_branch - hosts = args.hosts - - out = StdoutOutput() - - if not hosts: - print("no hosts specified.") - return 1 - - current_section = "" - - for line in hosts: - parts = line.split("=", 1) - if len(parts) == 1: - service = parts[0] - env_name = "environment" - else: - env_name, service = parts - - if current_section != env_name: - out.section(env_name) - current_section = env_name - - service = service.rstrip("/") - resp = fetch(f"{service}/__version__") - commit = resp["commit"] - tag = resp.get("version") or "(none)" - - parsed = urlparse(resp["source"]) - _, user, repo = parsed.path.split("/") - service_name = repo - out.row(service_name, "version", commit, tag) - out.print_delta(main_branch, user, repo, commit) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/docs/dev.rst b/docs/dev.rst index 4be83ee1..9018fd99 100644 --- a/docs/dev.rst +++ b/docs/dev.rst @@ -153,11 +153,11 @@ production, see documentation_. The ``gcs-emulator`` container stores data in memory and the data doesn't persist between container restarts. - You can use the ``bin/gcs_cli.py`` to access it: + You can use ``gcs-cli`` to access it: .. 
code-block:: shell - $ docker compose run --rm web shell python bin/gcs_cli.py list_buckets + $ docker compose run --rm web shell gcs-cli list_buckets If you do this a lot, turn it into a shell script. diff --git a/pyproject.toml b/pyproject.toml index ba61f911..9b36ae3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,8 +13,6 @@ docstring-quotes = "double" [tool.ruff.lint.per-file-ignores] "tests/*" = ["S101"] "systemtest/*" = ["S101"] -"bin/license-check.py" = ["S603", "S607"] -"bin/release.py" = ["S310", "S603", "S607"] [tool.pytest.ini_options] diff --git a/requirements.in b/requirements.in index 035d5f0b..5856d5bf 100644 --- a/requirements.in +++ b/requirements.in @@ -24,3 +24,6 @@ Sphinx==8.1.3 sphinx-rtd-theme==3.0.1 urlwait==1.0 werkzeug==3.0.6 +# Mozilla obs-team libraries that are published to GAR instead of pypi +--extra-index-url https://us-python.pkg.dev/moz-fx-cavendish-prod/cavendish-prod-python/simple/ +obs-common==2024.11.01.post1 diff --git a/requirements.txt b/requirements.txt index a14f2d0c..bd23212e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,8 @@ # # pip-compile --generate-hashes --strip-extras # +--extra-index-url https://us-python.pkg.dev/moz-fx-cavendish-prod/cavendish-prod-python/simple/ + alabaster==0.7.16 \ --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 @@ -133,6 +135,7 @@ click==8.1.7 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via # -r requirements.in + # obs-common # pip-tools datadog==0.49.1 \ --hash=sha256:4a56d57490ea699a0dfd9253547485a57b4120e93489defadcf95c66272374d6 \ @@ -234,11 +237,15 @@ google-cloud-core==2.4.1 \ google-cloud-pubsub==2.26.1 \ --hash=sha256:932d4434d86af25673082b48d54b318a448d1a7cd718404c33bf008ae9a8bb22 \ --hash=sha256:d46a302c2c7a008e399f4c04b4be6341d8aa7a537a25810ec8d38a5c125f816d - # via -r requirements.in + # via + # -r requirements.in + # obs-common google-cloud-storage==2.18.2 \ --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via -r requirements.in + # via + # -r requirements.in + # obs-common google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ @@ -501,6 +508,9 @@ more-itertools==10.5.0 \ --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 # via -r requirements.in +obs-common==2024.11.1.post1 \ + --hash=sha256:ed02bbdd7ef793feedeb2a6c91f02ad669ceb9c1913e1d9aff209731e3353e18 + # via -r requirements.in opentelemetry-api==1.27.0 \ --hash=sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 \ --hash=sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342 @@ -649,6 +659,7 @@ requests==2.32.3 \ # datadog # google-api-core # google-cloud-storage + # obs-common # sphinx rich==13.6.0 \ --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ @@ -684,6 +695,7 @@ sentry-sdk==2.17.0 \ # via # -r requirements.in # fillmore + # obs-common six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 diff --git a/tests/test_bin.py 
b/tests/test_bin.py deleted file mode 100644 index 57131cbc..00000000 --- a/tests/test_bin.py +++ /dev/null @@ -1,32 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. - -from pathlib import Path -import sys - -from click.testing import CliRunner - -# Add bin/ directory so we can import scripts -REPO_ROOT = Path(__file__).parent.parent.resolve() -sys.path.insert(0, str(REPO_ROOT / "bin")) - - -class TestPubSubCli: - def test_basic(self): - """Basic test to make sure pubsub_cli imports and runs at all.""" - from pubsub_cli import pubsub_group - - runner = CliRunner() - result = runner.invoke(pubsub_group, []) - assert result.exit_code == 0 - - -class TestGcsCli: - def test_basic(self): - """Basic test to make sure gcs_cli imports and runs at all.""" - from gcs_cli import gcs_group - - runner = CliRunner() - result = runner.invoke(gcs_group, []) - assert result.exit_code == 0
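
Migration note: this patch removes the vendored socorro-release helper scripts (bin/gcs_cli.py, bin/pubsub_cli.py, bin/license-check.py, bin/release.py, bin/service-status.py) and pulls in the obs-common package in their place, pinned from the Mozilla GAR index added to requirements.in. A minimal before/after sketch of the invocations, assuming the obs-common entry points (gcs-cli, pubsub-cli, license-check) are on PATH inside the container; the command names and subcommands below come from the run_lint.sh, run_setup.sh, and docs/dev.rst hunks above, not from obs-common's own documentation:

    # before: in-repo scripts run through the interpreter
    python ./bin/gcs_cli.py list_buckets
    python ./bin/pubsub_cli.py list_topics "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}"
    python bin/license-check.py .

    # after: obs-common console scripts, same subcommands
    gcs-cli list_buckets
    pubsub-cli list_topics "${CRASHMOVER_CRASHPUBLISH_PROJECT_ID}"
    license-check .

    # in the local dev environment, run them inside the web container
    docker compose run --rm web shell gcs-cli list_buckets

The deleted tests/test_bin.py only smoke-tested that the in-repo pubsub_cli and gcs_cli modules imported and ran, which is why it is removed along with them.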