diff --git a/.github/actions/upload_managed_plugin/action.yml b/.github/actions/upload_managed_plugin/action.yml
new file mode 100644
index 0000000000..efb26f4589
--- /dev/null
+++ b/.github/actions/upload_managed_plugin/action.yml
@@ -0,0 +1,86 @@
+name: upload-managed-plugin
+description: Upload binaries as an rpk managed plugin
+inputs:
+  aws_access_key_id:
+    description: For accessing S3 bucket
+    required: true
+  aws_secret_access_key:
+    description: For accessing S3 bucket
+    required: true
+  aws_region:
+    description: For accessing S3 bucket
+    required: true
+  aws_s3_bucket:
+    description: S3 bucket to use
+    required: true
+  artifacts_file:
+    description: Path to goreleaser artifacts.json
+    required: true
+  metadata_file:
+    description: Path to goreleaser metadata.json
+    required: true
+  project_root_dir:
+    description: Root dir of goreleaser project
+    required: true
+  plugin_name:
+    description: Should match the goreleaser build ID for the binary, e.g. "connect"
+    required: true
+  goos:
+    description: CSV list of target OSes to filter on
+    required: true
+  goarch:
+    description: CSV list of target architectures to filter on
+    required: true
+  repo_hostname:
+    description: RPK plugins repo hostname, e.g. rpk-plugins.redpanda.com
+    required: true
+  dry_run:
+    description: Dry run ('true') skips all writes to S3
+    required: true
+
+runs:
+  using: "composite"
+  steps:
+    - name: Configure AWS Credentials
+      uses: aws-actions/configure-aws-credentials@v4
+      with:
+        aws-access-key-id: ${{ inputs.aws_access_key_id }}
+        aws-secret-access-key: ${{ inputs.aws_secret_access_key }}
+        aws-region: ${{ inputs.aws_region }}
+
+    - uses: actions/setup-python@v5
+      with:
+        python-version: '3.12'
+
+    - name: Install deps
+      working-directory: resources/plugin_uploader
+      shell: bash
+      run: pip install -r requirements.txt
+
+    - name: Upload archives
+      working-directory: resources/plugin_uploader
+      shell: bash
+      run: |
+        # Composite action inputs are strings, so compare against 'true' explicitly.
+        DRY_RUN_FLAG=${{ inputs.dry_run == 'true' && '--dry-run' || '' }}
+        ./plugin_uploader.py upload-archives \
+          --artifacts-file=${{ inputs.artifacts_file }} \
+          --metadata-file=${{ inputs.metadata_file }} \
+          --project-root-dir=${{ inputs.project_root_dir }} \
+          --region=${{ inputs.aws_region }} \
+          --bucket=${{ inputs.aws_s3_bucket }} \
+          --plugin=${{ inputs.plugin_name }} \
+          --goos=${{ inputs.goos }} \
+          --goarch=${{ inputs.goarch }} \
+          $DRY_RUN_FLAG
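+
+    # Note: upload-manifest rebuilds the manifest from the archives that are
+    # already in S3, so the archive upload step above must run first.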
+    - name: Upload manifest
+      working-directory: resources/plugin_uploader
+      shell: bash
+      run: |
+        DRY_RUN_FLAG=${{ inputs.dry_run == 'true' && '--dry-run' || '' }}
+        ./plugin_uploader.py upload-manifest \
+          --region=${{ inputs.aws_region }} \
+          --bucket=${{ inputs.aws_s3_bucket }} \
+          --plugin=${{ inputs.plugin_name }} \
+          --repo-hostname=${{ inputs.repo_hostname }} \
+          $DRY_RUN_FLAG
\ No newline at end of file
diff --git a/.github/workflows/test_plugin_uploader.yml b/.github/workflows/test_plugin_uploader.yml
new file mode 100644
index 0000000000..bc79e8f6ad
--- /dev/null
+++ b/.github/workflows/test_plugin_uploader.yml
@@ -0,0 +1,61 @@
+name: Test Plugin Uploader
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - 'resources/plugin_uploader/**'
+      - '.github/workflows/test_plugin_uploader.yml'
+  pull_request:
+    paths:
+      - 'resources/plugin_uploader/**'
+      - '.github/workflows/test_plugin_uploader.yml'
+
+jobs:
+  unit-test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - working-directory: resources/plugin_uploader
+        run: pip install -r requirements_test.txt
+
+      - working-directory: resources/plugin_uploader
+        run: pytest -vv .
+
+  ruff-lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - name: Lint with Ruff
+        working-directory: resources/plugin_uploader
+        run: |
+          pip install ruff==0.4.10
+          ruff check --output-format=github
+
+  pyright-type-check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - working-directory: resources/plugin_uploader
+        run: pip install -r requirements_test.txt
+
+      - run: pip install pyright==1.1.378
+
+      - working-directory: resources/plugin_uploader
+        run: pyright
diff --git a/.github/workflows/upload_plugin.yml b/.github/workflows/upload_plugin.yml
new file mode 100644
index 0000000000..1cb46563c8
--- /dev/null
+++ b/.github/workflows/upload_plugin.yml
@@ -0,0 +1,83 @@
+name: Upload rpk connect plugin to S3
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      # All runs triggered by tag will really push to S3.
+      # Take care when adding more patterns here.
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+'
+  pull_request:
+    # Keep CI snappy for unrelated PRs
+    paths:
+      - 'resources/plugin_uploader/**'
+      - '.github/workflows/upload_plugin.yml'
+      - '.github/actions/upload_managed_plugin/**'
+      - '.goreleaser.yml'
+  workflow_dispatch: {}
+
+env:
+  # Do a dry run in most cases, UNLESS the triggering event was a tag push.
+  DRY_RUN: ${{ github.ref_type != 'tag' }}
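+  # Env values are strings inside ${{ }} expressions, so the steps below
+  # compare DRY_RUN against the literal 'true'.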
+
+jobs:
+  upload_rpk_connect_plugin:
+    # Let's make this fast by using a beefy runner.
+    runs-on: ubuntu-latest-32
+    if: ${{ github.repository == 'redpanda-data/connect' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'redpanda-data/connect') }}
+    permissions:
+      contents: read
+      id-token: write
+    steps:
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-region: ${{ vars.RP_AWS_CRED_REGION }}
+          role-to-assume: arn:aws:iam::${{ secrets.RP_AWS_CRED_ACCOUNT_ID }}:role/${{ vars.RP_AWS_CRED_BASE_ROLE_NAME }}${{ github.event.repository.name }}
+
+      - name: Get secrets from AWS Secrets Manager (for reading/writing the S3-backed plugins repo)
+        uses: aws-actions/aws-secretsmanager-get-secrets@v2
+        with:
+          secret-ids: |
+            ,sdlc/prod/github/rpk_plugin_publisher
+          parse-json-secrets: true
+
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-go@v5
+        with:
+          go-version: 1.22.x
+          check-latest: true
+
+      - name: Build binaries (dry run / snapshot mode)
+        if: ${{ env.DRY_RUN == 'true' }}
+        uses: goreleaser/goreleaser-action@v6
+        with:
+          version: 1.26.2
+          args: build --snapshot
+
+      - name: Build binaries
+        if: ${{ env.DRY_RUN != 'true' }}
+        uses: goreleaser/goreleaser-action@v6
+        with:
+          version: 1.26.2
+          args: build
+
+      - name: Upload connect plugin to S3
+        uses: ./.github/actions/upload_managed_plugin
+        with:
+          aws_access_key_id: ${{ env.RPK_PLUGIN_PUBLISHER_AWS_ACCESS_KEY_ID }}
+          aws_secret_access_key: ${{ env.RPK_PLUGIN_PUBLISHER_AWS_SECRET_ACCESS_KEY }}
+          aws_region: "us-west-2"
+          aws_s3_bucket: "rpk-plugins-repo"
+          project_root_dir: ${{ github.workspace }}
+          artifacts_file: ${{ github.workspace }}/target/dist/artifacts.json
+          metadata_file: ${{ github.workspace }}/target/dist/metadata.json
+          plugin_name: "connect"
+          goos: linux,darwin
+          goarch: amd64,arm64
+          repo_hostname: rpk-plugins.redpanda.com
+          dry_run: ${{ env.DRY_RUN }}
+
diff --git a/.gitignore b/.gitignore
index f21515d335..78ef7bbb58 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ release_notes.md
 .idea
 .vscode
 .op
+__pycache__
diff --git a/resources/plugin_uploader/README.md b/resources/plugin_uploader/README.md
new file mode 100644
index 0000000000..1e5d72f513
--- /dev/null
+++ b/resources/plugin_uploader/README.md
@@ -0,0 +1,102 @@
+# Plugin uploader
+
+## Description
+
+`plugin_uploader.py` uploads the binaries generated by goreleaser to S3 in a form that rpk can consume as a managed plugin.
+
+```
+Usage: plugin_uploader.py [OPTIONS] COMMAND [ARGS]...
+
+  CLI tool to upload/index goreleaser-built binaries to/in S3.
+
+Options:
+  --help  Show this message and exit.
+
+Commands:
+  upload-archives  Create tar.gz archives from binaries and upload to S3
+  upload-manifest  Create manifest.json and upload to S3
+```
+
+## Install
+
+`pip install -r requirements.txt`
+
+## How to use
+
+The primary use case is in GitHub Actions, in response to the creation of a GitHub release.
+
+See `.github/workflows/upload_plugin.yml` for this in action.
+
+It's expected that you have used goreleaser to build a set of binaries for a given release tag (such as following a
+GitHub release tag creation).
+
+Goreleaser creates a `$DIST` directory (`dist/` by default) at the project root dir containing all built binaries and
+two JSON files:
+
+* `$DIST/<build-id>-<goos>-<goarch>/`
+* ...
+* `$DIST/artifacts.json`
+* `$DIST/metadata.json`
+
+### Create archives from binaries and upload them
+
+Locate the `artifacts.json` and `metadata.json` files produced by goreleaser,
+e.g. `$DIST/artifacts.json` and `$DIST/metadata.json`.
+
+```shell
+./plugin_uploader.py upload-archives \
+  --artifacts-file=$DIST/artifacts.json \
+  --metadata-file=$DIST/metadata.json \
+  --project-root-dir=<PROJECT_ROOT> \
+  --region=<AWS_REGION> \
+  --bucket=<BUCKET> \
+  --plugin=<PLUGIN_NAME> \
+  --goos=<GOOS_CSV> \
+  --goarch=<GOARCH_CSV>
+```
+
+`PROJECT_ROOT` should be the root directory of the Golang project (by default, where `.goreleaser.yml` lives).
+
+`PLUGIN_NAME` should match the `<build-id>` as defined in the goreleaser configs.
+
+It's assumed that the output binary filename is `redpanda-<build-id>`. E.g. for the **connect** project:
+
+* `build-id` is `connect`
+* Binary is `redpanda-connect`
+
+A binary is included for archival/upload only if it matches some `--goos` AND some `--goarch`.
+
+`--dry-run` is available for skipping the final S3 upload step.
+
+AWS permissions are needed for these actions on the S3 bucket:
+
+* `s3:PutObject`
+* `s3:PutObjectTagging`
+
+You may also need permissions on any AWS KMS keys used for server-side encryption of the S3 bucket.
+
+### Create manifest.json and upload it
+
+This lists all archives for the specific plugin and constructs a `manifest.json` from the listing (see the example
+manifest at the end of this README).
+
+This should be run after uploading any archives.
+
+```shell
+./plugin_uploader.py upload-manifest \
+  --region=<AWS_REGION> \
+  --bucket=<BUCKET> \
+  --plugin=<PLUGIN_NAME> \
+  --repo-hostname=<REPO_HOSTNAME>
+```
+
+`--repo-hostname` is used to generate the public-facing download URLs for archives in the plugin repo, e.g.
+`rpk-plugins.redpanda.com`.
+
+`--dry-run` is available for skipping the final S3 upload step.
+
+AWS permissions are needed for these actions on the S3 bucket:
+
+* `s3:PutObject`
+* `s3:ListBucket`
+* `s3:GetObjectTagging`
+
+You may also need permissions on any AWS KMS keys used for server-side encryption of the S3 bucket.
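+
+### Example manifest.json
+
+For reference, a sketch of the manifest this tool generates. The structure matches what
+`upload-manifest` writes; the hostname, paths, and hash here are illustrative values, not real ones:
+
+```json
+{
+    "archives": [
+        {
+            "artifacts": {
+                "darwin-arm64": {
+                    "path": "https://rpk-plugins.redpanda.com/connect/archives/4.34.0/redpanda-connect-darwin-arm64.tar.gz",
+                    "sha256": "<sha256 of the uncompressed plugin binary>"
+                },
+                "linux-amd64": {
+                    "path": "https://rpk-plugins.redpanda.com/connect/archives/4.34.0/redpanda-connect-linux-amd64.tar.gz",
+                    "sha256": "<sha256 of the uncompressed plugin binary>"
+                }
+            },
+            "is_latest": true,
+            "version": "4.34.0"
+        }
+    ],
+    "created_at": 1700000000
+}
+```
+
+`is_latest` is set only on the highest non-RC version found in the bucket.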
diff --git a/resources/plugin_uploader/plugin_uploader.py b/resources/plugin_uploader/plugin_uploader.py
new file mode 100755
index 0000000000..a6374db858
--- /dev/null
+++ b/resources/plugin_uploader/plugin_uploader.py
@@ -0,0 +1,439 @@
+#!/usr/bin/env python3
+
+import collections
+import dataclasses
+import hashlib
+import json
+import logging
+import os
+import re
+import tarfile
+import tempfile
+import time
+import urllib.parse
+from contextlib import contextmanager
+
+import boto3
+import click
+from pydantic import BaseModel
+
+
+# Partial schema of goreleaser metadata.json
+class Metadata(BaseModel):
+    tag: str
+    version: str
+
+
+# Partial schema of goreleaser artifacts.json
+class Artifact(BaseModel):
+    name: str
+    path: str
+    type: str
+    goos: str | None = None
+    goarch: str | None = None
+
+
+@dataclasses.dataclass
+class PluginConfig:
+    """Encapsulates config specific to a plugin (like `connect`)."""
+
+    plugin_name: str
+    binary_name: str
+
+    # All these path methods return S3 paths.
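+    # Resulting layout for a plugin named "connect" at version 4.34.0
+    # (illustrative):
+    #   connect/manifest.json
+    #   connect/archives/4.34.0/redpanda-connect-linux-amd64.tar.gz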
+    def get_manifest_path(self) -> str:
+        return f"{self.plugin_name}/manifest.json"
+
+    def get_archives_root_path(self) -> str:
+        return f"{self.plugin_name}/archives"
+
+    def get_archives_version_dir_path(self, version: str) -> str:
+        return f"{self.get_archives_root_path()}/{version}"
+
+    def get_archive_full_path(self, binary_artifact: Artifact, version: str) -> str:
+        return f"{self.get_archives_version_dir_path(version)}/{binary_artifact.name}-{binary_artifact.goos}-{binary_artifact.goarch}.tar.gz"
+
+
+def get_plugin_config(plugin_name: str) -> PluginConfig:
+    return PluginConfig(plugin_name=plugin_name, binary_name=f"redpanda-{plugin_name}")
+
+
+def get_binary_sha256_digest(filepath: str) -> str:
+    with open(filepath, "rb") as f:
+        s = hashlib.sha256(f.read())
+    return s.hexdigest()
+
+
+def get_artifacts(artifacts_file: str) -> list[Artifact]:
+    with open(artifacts_file, "r") as f:
+        data = json.load(f)
+    assert isinstance(data, list), f"Expected {artifacts_file} to contain a JSON list payload"
+    return [Artifact(**item) for item in data]
+
+
+def get_metadata(metadata_file: str) -> Metadata:
+    with open(metadata_file, "r") as f:
+        data = json.load(f)
+    assert isinstance(data, dict), f"Expected {metadata_file} to contain a JSON dict payload"
+    return Metadata(**data)
+
+
+class S3BucketClient:
+    """A wrapper around a boto3 S3 client that knows which bucket it works with.
+
+    Comes with higher-level methods as needed."""
+
+    def __init__(self, bucket: str, region: str):
+        self._client = boto3.client("s3", region_name=region)
+        self._bucket = bucket
+
+    def upload_file_with_tags(
+        self, file: str, object_path: str, tags: dict[str, str] = {}
+    ):
+        with open(file, "rb") as f:
+            return self.upload_blob_with_tags(f.read(), object_path, tags=tags)
+
+    def upload_blob_with_tags(
+        self, data: bytes, object_path: str, tags: dict[str, str] = {}
+    ):
+        self._client.put_object(
+            Bucket=self._bucket,
+            Body=data,
+            Key=object_path,
+            # We want users to receive the latest stuff promptly. This minimizes
+            # inconsistencies between manifest.json and archives when served over
+            # Cloudfront.
+            CacheControl="max-age=1",
+            Tagging=urllib.parse.urlencode(tags),
+        )
+
+    def list_dir_recursive(self, s3_dir_path: str | None = None) -> list[str]:
+        paginator = self._client.get_paginator("list_objects_v2")
+        if s3_dir_path is None:
+            pages = paginator.paginate(Bucket=self._bucket)
+        else:
+            pages = paginator.paginate(Bucket=self._bucket, Prefix=s3_dir_path)
+
+        keys = []
+        for page in pages:
+            # An absent "Contents" key indicates empty results; stop immediately.
+            if "Contents" not in page:
+                break
+            for obj in page["Contents"]:
+                keys.append(obj["Key"])
+        return keys
+
+    def get_object_tags(self, object_path: str) -> dict[str, str]:
+        response = self._client.get_object_tagging(
+            Bucket=self._bucket,
+            Key=object_path,
+        )
+        return {tag["Key"]: tag["Value"] for tag in response["TagSet"]}
+
+
+def create_tar_gz_archive(single_filepath: str) -> str:
+    # mkstemp instead of the deprecated (and race-prone) mktemp.
+    fd, tmp_archive = tempfile.mkstemp(suffix=".tar.gz")
+    os.close(fd)
+    with tarfile.open(tmp_archive, "w:gz") as tar:
+        tar.add(single_filepath, arcname=os.path.basename(single_filepath))
+    return tmp_archive
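+
+
+# These object tags are attached to every uploaded archive by upload-archives
+# and read back by upload-manifest, which rebuilds manifest.json purely from
+# the bucket listing plus these tags.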
+TAG_BINARY_NAME = "redpanda/binary_name"
+TAG_BINARY_SHA256 = "redpanda/binary_sha256"
+TAG_GOOS = "redpanda/goos"
+TAG_GOARCH = "redpanda/goarch"
+TAG_VERSION = "redpanda/version"
+
+
+@contextmanager
+def cwd(new_dir: str):
+    """Temporarily change the working directory."""
+    old_dir = os.getcwd()
+    try:
+        os.chdir(new_dir)
+        yield
+    finally:
+        os.chdir(old_dir)
+
+
+def create_and_upload_one_archive(artifact: Artifact, plugin_config: PluginConfig, project_root_dir: str, version: str,
+                                  bucket: str, region: str, dry_run: bool):
+    if dry_run:
+        s3_bucket_client = None
+    else:
+        s3_bucket_client = S3BucketClient(bucket, region)
+    logging.info(f"Processing {artifact}")
+
+    # Artifact paths in artifacts.json are relative to the project root.
+    with cwd(project_root_dir):
+        binary_sha256 = get_binary_sha256_digest(artifact.path)
+        logging.info(f"Binary SHA256 = {binary_sha256}")
+        tmp_archive = None
+        try:
+            tmp_archive = create_tar_gz_archive(artifact.path)
+            logging.info(f"Created archive {tmp_archive}")
+            s3_path_for_archive = plugin_config.get_archive_full_path(
+                binary_artifact=artifact, version=version
+            )
+
+            tags = {
+                TAG_BINARY_NAME: plugin_config.binary_name,
+                TAG_BINARY_SHA256: binary_sha256,
+                TAG_GOOS: artifact.goos,
+                TAG_GOARCH: artifact.goarch,
+                TAG_VERSION: version,
+            }
+            if dry_run:
+                logging.info(
+                    f"DRY-RUN - Would have uploaded archive to S3 bucket {bucket} as {s3_path_for_archive}"
+                )
+                logging.info(f"Tags: {json.dumps(tags, indent=4)}")
+            else:
+                logging.info(
+                    f"Uploading archive to S3 bucket {bucket} as {s3_path_for_archive}"
+                )
+                assert (
+                    s3_bucket_client is not None
+                ), "s3_bucket_client should be initialized in non-dry-run mode"
+                s3_bucket_client.upload_file_with_tags(
+                    file=tmp_archive, object_path=s3_path_for_archive, tags=tags
+                )
+        finally:
+            if tmp_archive and os.path.exists(tmp_archive):
+                os.unlink(tmp_archive)
+    logging.info("DONE")
+
+
+def create_and_upload_archives(
+    project_root_dir: str,
+    plugin_config: PluginConfig,
+    artifacts: list[Artifact],
+    bucket: str,
+    region: str,
+    version: str,
+    dry_run: bool,
+):
+    for artifact in artifacts:
+        create_and_upload_one_archive(
+            artifact=artifact,
+            plugin_config=plugin_config,
+            project_root_dir=project_root_dir,
+            version=version,
+            bucket=bucket,
+            region=region,
+            dry_run=dry_run,
+        )
+
+
+def get_max_version_str(version_strs: list[str]) -> str | None:
+    max_version = None
+    max_version_tuple = None
+    for version in version_strs:
+        # Only real releases are eligible to be latest. E.g. no RCs.
+        m = re.search(r"^(\d+)\.(\d+)\.(\d+)$", version)
+        if not m:
+            continue
+        version_tuple = (int(m[1]), int(m[2]), int(m[3]))
+        if max_version_tuple is None or version_tuple > max_version_tuple:
+            max_version_tuple = version_tuple
+            max_version = version
+    return max_version
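+
+
+# Example (hypothetical values): RCs never qualify as "latest".
+#   get_max_version_str(["4.34.0", "4.36.0-rc1"]) == "4.34.0"
+#   get_max_version_str(["4.36.0-rc1"]) is None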
+
+
+def get_object_tags_for_keys(
+    s3_bucket_client: S3BucketClient, keys: list[str]
+) -> dict[str, dict[str, str]]:
+    return {k: s3_bucket_client.get_object_tags(k) for k in keys}
+
+
+def create_and_upload_manifest_json(
+    plugin_config: PluginConfig,
+    bucket: str,
+    region: str,
+    repo_hostname: str,
+    dry_run: bool,
+):
+    # Even in dry-run mode we READ from the S3 bucket; we just don't write
+    # anything to it. Therefore S3 creds are needed even for --dry-run.
+    s3_bucket_client = S3BucketClient(bucket, region)
+    list_path = plugin_config.get_archives_root_path().rstrip("/") + "/"
+    logging.info(f"Listing all objects in bucket {bucket} under path {list_path}")
+    keys = s3_bucket_client.list_dir_recursive(list_path)
+
+    object_tags_for_keys = get_object_tags_for_keys(s3_bucket_client, keys)
+
+    archives = []
+    manifest = {
+        "created_at": int(time.time()),
+        "archives": archives,
+    }
+    version_to_artifact_infos: dict[str, list[dict[str, str]]] = (
+        collections.defaultdict(list)
+    )
+    for key, tag_map in object_tags_for_keys.items():
+        try:
+            binary_name = tag_map[TAG_BINARY_NAME]
+            if binary_name != plugin_config.binary_name:
+                logging.info(f"Skipping {key}, wrong binary name: {binary_name}")
+                continue
+            logging.info(f"Found {key} with tags: {tag_map}")
+            version_to_artifact_infos[tag_map[TAG_VERSION]].append(
+                {
+                    "binary_name": tag_map[TAG_BINARY_NAME],
+                    "binary_sha256": tag_map[TAG_BINARY_SHA256],
+                    "goos": tag_map[TAG_GOOS],
+                    "goarch": tag_map[TAG_GOARCH],
+                    "path": key,
+                }
+            )
+        except KeyError as ke:
+            logging.info(f"Skipping {key}, missing tag: {ke}")
+            continue
+
+    max_version: str | None = None
+    if not version_to_artifact_infos:
+        logging.warning(f"No artifacts found in bucket {bucket} for {plugin_config.plugin_name}")
+    else:
+        max_version = get_max_version_str(list(version_to_artifact_infos))
+        if max_version is None:
+            logging.warning("No real releases found (maybe only RCs?)")
+        logging.info(f"All versions found: {list(version_to_artifact_infos)}")
+
+    for version, artifact_infos in version_to_artifact_infos.items():
+        artifacts: dict[str, dict[str, str]] = {}
+        for artifact_info in artifact_infos:
+            artifacts[f"{artifact_info['goos']}-{artifact_info['goarch']}"] = {
+                "path": f"https://{repo_hostname}/{artifact_info['path']}",
+                "sha256": artifact_info["binary_sha256"],
+            }
+        archive = {
+            "version": version,
+            "artifacts": artifacts,
+        }
+        if version == max_version:
+            archive["is_latest"] = True
+        archives.append(archive)
+    logging.info("Manifest:")
+    manifest_json = json.dumps(manifest, indent=4, sort_keys=True)
+    logging.info(manifest_json)
+    if dry_run:
+        logging.info(
+            f"DRY-RUN - Would have uploaded manifest.json to {plugin_config.get_manifest_path()}"
+        )
+    else:
+        logging.info(f"Uploading manifest.json to {plugin_config.get_manifest_path()}")
+        s3_bucket_client.upload_blob_with_tags(
+            object_path=plugin_config.get_manifest_path(),
+            data=manifest_json.encode("utf-8"),
+        )
+
+
+@click.group(help="CLI tool to upload/index goreleaser-built binaries to/in S3.")
+def cli():
+    logging.basicConfig(
+        level=logging.INFO, format="%(asctime)s %(levelname)s %(name)s %(message)s"
+    )
+
+
+@cli.command(name="upload-archives", help="Create tar.gz archives from binaries and upload to S3")
+@click.option(
+    "--artifacts-file",
+    required=True,
+    help="artifacts.json file produced by `goreleaser`",
+)
+@click.option(
+    "--metadata-file", required=True, help="metadata.json file produced by `goreleaser`"
+)
+@click.option(
+    "--project-root-dir", required=True,
+    help="Root directory of the Go project. File paths within artifacts.json are relative to this directory."
+)
+@click.option("--region", required=True)
+@click.option("--bucket", required=True)
+@click.option("--plugin", required=True, help="Plugin to process. E.g. `connect`")
+@click.option(
+    "--goos",
+    required=True,
+    help="CSV list of OSes to process binaries for. E.g. 'linux,darwin'",
+)
+@click.option(
+    "--goarch",
+    required=True,
+    help="CSV list of architectures to process binaries for. E.g. 'amd64,arm64'",
+)
+@click.option(
+    "--deduce-version-from-tag",
+    is_flag=True,
+    help="Deduce the version from the tag in metadata.json",
+)
+@click.option("--dry-run", is_flag=True)
+def upload_archives(
+    artifacts_file: str,
+    metadata_file: str,
+    project_root_dir: str,
+    region: str,
+    bucket: str,
+    plugin: str,
+    goos: str,
+    goarch: str,
+    deduce_version_from_tag: bool,
+    dry_run: bool,
+):
+    goos_list = goos.split(",")
+    goarch_list = goarch.split(",")
+    plugin_config = get_plugin_config(plugin)
+    artifacts = get_artifacts(artifacts_file)
+    if deduce_version_from_tag:
+        # removeprefix, not lstrip: lstrip("v") would strip every leading "v".
+        version = get_metadata(metadata_file).tag.removeprefix("v")
+    else:
+        version = get_metadata(metadata_file).version
+    artifacts_to_process = [
+        a
+        for a in artifacts
+        if a.type == "Binary"
+        and a.name == plugin_config.binary_name
+        and a.goos in goos_list
+        and a.goarch in goarch_list
+    ]
+    logging.info(f"Found {len(artifacts_to_process)} artifacts to process")
+    for a in artifacts_to_process:
+        logging.info(f"    {a}")
+    create_and_upload_archives(
+        project_root_dir=project_root_dir,
+        plugin_config=plugin_config,
+        artifacts=artifacts_to_process,
+        version=version,
+        region=region,
+        bucket=bucket,
+        dry_run=dry_run,
+    )
+
+
+@cli.command(name="upload-manifest", help="Create manifest.json and upload to S3")
+@click.option("--bucket", required=True)
+@click.option("--region", required=True)
+@click.option("--repo-hostname", required=True)
+@click.option("--plugin", required=True, help="Plugin to process. E.g. 
`connect`") +@click.option("--dry-run", is_flag=True) +def upload_manifest( + bucket: str, region: str, repo_hostname: str, plugin: str, dry_run: bool +): + plugin_config = get_plugin_config(plugin) + create_and_upload_manifest_json( + plugin_config=plugin_config, + bucket=bucket, + region=region, + repo_hostname=repo_hostname, + dry_run=dry_run, + ) + + +if __name__ == "__main__": + cli() diff --git a/resources/plugin_uploader/requirements.txt b/resources/plugin_uploader/requirements.txt new file mode 100644 index 0000000000..c3a1ee0f94 --- /dev/null +++ b/resources/plugin_uploader/requirements.txt @@ -0,0 +1,3 @@ +pydantic>=2.8 +boto3>=1.26 +click==8.1.7 \ No newline at end of file diff --git a/resources/plugin_uploader/requirements_test.txt b/resources/plugin_uploader/requirements_test.txt new file mode 100644 index 0000000000..9c844cfda9 --- /dev/null +++ b/resources/plugin_uploader/requirements_test.txt @@ -0,0 +1,5 @@ +pydantic>=2.8 +boto3>=1.26 +click==8.1.7 +moto[s3]==5.0.13 +pytest==8.3.2 \ No newline at end of file diff --git a/resources/plugin_uploader/test_data/dist/artifacts.json b/resources/plugin_uploader/test_data/dist/artifacts.json new file mode 100644 index 0000000000..12946f37c3 --- /dev/null +++ b/resources/plugin_uploader/test_data/dist/artifacts.json @@ -0,0 +1,35 @@ +[ + { + "name": "metadata.json", + "path": "dist/metadata.json", + "internal_type": 30, + "type": "Metadata" + }, + { + "name": "redpanda-cow", + "path": "dist/cow_linux_amd64_v1/redpanda-cow", + "goos": "linux", + "goarch": "amd64", + "goamd64": "v1", + "internal_type": 4, + "type": "Binary", + "extra": { + "Binary": "redpanda-cow", + "Ext": "", + "ID": "cow" + } + }, + { + "name": "redpanda-cow", + "path": "dist/cow_darwin_arm64/redpanda-cow", + "goos": "darwin", + "goarch": "arm64", + "internal_type": 4, + "type": "Binary", + "extra": { + "Binary": "redpanda-cow", + "Ext": "", + "ID": "cow" + } + } +] \ No newline at end of file diff --git a/resources/plugin_uploader/test_data/dist/cow_darwin_arm64/redpanda-cow b/resources/plugin_uploader/test_data/dist/cow_darwin_arm64/redpanda-cow new file mode 100644 index 0000000000..e69de29bb2 diff --git a/resources/plugin_uploader/test_data/dist/cow_linux_amd64_v1/redpanda-cow b/resources/plugin_uploader/test_data/dist/cow_linux_amd64_v1/redpanda-cow new file mode 100644 index 0000000000..e69de29bb2 diff --git a/resources/plugin_uploader/test_data/dist/metadata_v4_34_0.json b/resources/plugin_uploader/test_data/dist/metadata_v4_34_0.json new file mode 100644 index 0000000000..664b807c43 --- /dev/null +++ b/resources/plugin_uploader/test_data/dist/metadata_v4_34_0.json @@ -0,0 +1,12 @@ +{ + "project_name": "cow", + "tag": "v4.34.0", + "previous_tag": "v4.33.0-rc2", + "version": "4.34.0", + "commit": "7eb28f2a994e277f17bf0530097d99208e65cddb", + "date": "2024-08-29T23:53:58.388135715Z", + "runtime": { + "goos": "linux", + "goarch": "arm64" + } +} \ No newline at end of file diff --git a/resources/plugin_uploader/test_data/dist/metadata_v4_35_0.json b/resources/plugin_uploader/test_data/dist/metadata_v4_35_0.json new file mode 100644 index 0000000000..1218b32cb8 --- /dev/null +++ b/resources/plugin_uploader/test_data/dist/metadata_v4_35_0.json @@ -0,0 +1,12 @@ +{ + "project_name": "cow", + "tag": "v4.35.0", + "previous_tag": "v4.34.0-rc2", + "version": "4.35.0", + "commit": "7eb28f2a994e277f17bf0530097d99208e65cddb", + "date": "2024-08-29T23:53:58.388135715Z", + "runtime": { + "goos": "linux", + "goarch": "arm64" + } +} \ No newline at end of file diff --git 
a/resources/plugin_uploader/test_data/dist/metadata_v4_36_0_rc1.json b/resources/plugin_uploader/test_data/dist/metadata_v4_36_0_rc1.json
new file mode 100644
index 0000000000..77d0ba5e72
--- /dev/null
+++ b/resources/plugin_uploader/test_data/dist/metadata_v4_36_0_rc1.json
@@ -0,0 +1,12 @@
+{
+    "project_name": "cow",
+    "tag": "v4.36.0-rc1",
+    "previous_tag": "v4.34.0-rc2",
+    "version": "4.36.0-rc1",
+    "commit": "7eb28f2a994e277f17bf0530097d99208e65cddb",
+    "date": "2024-08-29T23:53:58.388135715Z",
+    "runtime": {
+        "goos": "linux",
+        "goarch": "arm64"
+    }
+}
\ No newline at end of file
diff --git a/resources/plugin_uploader/test_plugin_uploader.py b/resources/plugin_uploader/test_plugin_uploader.py
new file mode 100644
index 0000000000..a0130b81fe
--- /dev/null
+++ b/resources/plugin_uploader/test_plugin_uploader.py
@@ -0,0 +1,250 @@
+import json
+import os
+import unittest
+from typing import Any
+
+import boto3
+from click.testing import CliRunner
+from moto import mock_aws
+
+from plugin_uploader import S3BucketClient, PluginConfig, cli
+
+TEST_BUCKET = "my-bucket"
+TEST_REGION = "my-region"
+TEST_PLUGIN = PluginConfig(plugin_name="cow", binary_name="redpanda-cow")
+
+
+def create_bucket_and_return_clients():
+    """Create TEST_BUCKET and return an S3BucketClient and a boto3 S3 client for it."""
+    client = boto3.client("s3", region_name=TEST_REGION)
+    client.create_bucket(
+        Bucket=TEST_BUCKET,
+        CreateBucketConfiguration={"LocationConstraint": TEST_REGION},
+    )
+
+    # S3BucketClient, boto3 S3 client
+    return S3BucketClient(TEST_BUCKET, TEST_REGION), client
+
+
+class TestS3BucketClient(unittest.TestCase):
+    @mock_aws
+    def test_list_dir_recursive(self):
+        bucket_client, _ = create_bucket_and_return_clients()
+        keys_added = set()
+        # 2048 keys forces list_dir_recursive through multiple list_objects_v2 pages.
+        for i in range(2048):
+            key = f"root/{i}/{i}"
+            keys_added.add(key)
+            bucket_client.upload_blob_with_tags(object_path=key, data=b"")
+        found_keys = bucket_client.list_dir_recursive("root")
+        assert set(found_keys) == keys_added
+
+
+RESIDENT_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
+# "test_data" here maps to the root of a real Go project (like the root of the connect repo)
+TEST_DATA_DIR_PATH = f"{RESIDENT_DIR_PATH}/test_data"
+
+
+class TestUploadArchives(unittest.TestCase):
+
+    @mock_aws
+    def test_end_to_end_upload(self):
+        """Run upload-archives, then upload-manifest + verify all archives and correct manifest uploaded"""
+        # Create the clients (and thus the bucket) up front, so the bucket exists
+        # before any CLI invocation below.
+        bucket_client, s3_client = create_bucket_and_return_clients()
+
+        runner = CliRunner()
+
+        ARTIFACTS_FILE = f"{TEST_DATA_DIR_PATH}/dist/artifacts.json"
+
+        def _run_and_validate_upload_archives(
+            metadata_file: str, expected_keys: set[str]
+        ):
+            os.chdir(TEST_DATA_DIR_PATH)
+            _result = runner.invoke(
+                cli,
+                [
+                    "upload-archives",
+                    f"--artifacts-file={ARTIFACTS_FILE}",
+                    f"--metadata-file={metadata_file}",
+                    f"--project-root-dir={TEST_DATA_DIR_PATH}",
+                    f"--region={TEST_REGION}",
+                    f"--bucket={TEST_BUCKET}",
+                    f"--plugin={TEST_PLUGIN.plugin_name}",
+                    # Extra goos/goarch values with no matching artifacts should be ignored.
+                    "--goos=linux,darwin,windows",
+                    "--goarch=amd64,arm64,turing",
+                ],
+                # TODO: check whether regular CLI execution is also transparent re: exceptions (we want that)
+                catch_exceptions=False,
+            )
+            assert _result.exit_code == 0
+            found_keys = set(bucket_client.list_dir_recursive())
+            print(found_keys)
+            assert found_keys == expected_keys
+
+        def _run_and_validate_upload_manifests(expected_manifest: dict[str, Any]):
+            # Run upload-manifest, then verify the manifest uploaded to S3 matches expectations.
+            result = 
runner.invoke(
+                cli,
+                [
+                    "upload-manifest",
+                    f"--region={TEST_REGION}",
+                    f"--bucket={TEST_BUCKET}",
+                    f"--plugin={TEST_PLUGIN.plugin_name}",
+                    "--repo-hostname=cow.farm.com",
+                ],
+                catch_exceptions=False,
+            )
+            assert result.exit_code == 0
+            response = s3_client.get_object(Bucket=TEST_BUCKET, Key="cow/manifest.json")
+            found_manifest = json.load(response["Body"])
+
+            # Pin created_at before comparing; it differs on every run.
+            found_manifest["created_at"] = 1700000000
+            assert expected_manifest == found_manifest
+
+        # upload-manifest before we have ANY archives in S3 (empty manifest.json)
+        _run_and_validate_upload_manifests(expected_manifest={
+            "archives": [],
+            "created_at": 1700000000,
+        })
+
+        # upload-archives (upload an RC)
+        _run_and_validate_upload_archives(
+            metadata_file=f"{TEST_DATA_DIR_PATH}/dist/metadata_v4_36_0_rc1.json",
+            expected_keys={
+                "cow/manifest.json",
+                "cow/archives/4.36.0-rc1/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.36.0-rc1/redpanda-cow-linux-amd64.tar.gz",
+            },
+        )
+        # RCs show up in manifest.json but should never be marked "is_latest"
+        _run_and_validate_upload_manifests(expected_manifest={
+            "archives": [
+                {
+                    'artifacts': {
+                        'darwin-arm64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.36.0-rc1/redpanda-cow-darwin-arm64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                        'linux-amd64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.36.0-rc1/redpanda-cow-linux-amd64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                    },
+                    'version': '4.36.0-rc1',
+                },
+
+            ],
+            "created_at": 1700000000,
+        })
+
+        # upload-archives (upload a real version 4.34.0 that has a lower version number than the RC)
+        _run_and_validate_upload_archives(
+            metadata_file=f"{TEST_DATA_DIR_PATH}/dist/metadata_v4_34_0.json",
+            expected_keys={
+                "cow/manifest.json",
+                "cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz",
+                "cow/archives/4.36.0-rc1/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.36.0-rc1/redpanda-cow-linux-amd64.tar.gz",
+            },
+        )
+        # verify that 4.34.0 is marked as latest, NOT the RC
+        _run_and_validate_upload_manifests(expected_manifest={
+            "archives": [
+                {
+                    'artifacts': {
+                        'darwin-arm64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                        'linux-amd64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                    },
+                    'is_latest': True,
+                    'version': '4.34.0',
+                },
+                {
+                    'artifacts': {
+                        'darwin-arm64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.36.0-rc1/redpanda-cow-darwin-arm64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                        'linux-amd64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.36.0-rc1/redpanda-cow-linux-amd64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                    },
+                    'version': '4.36.0-rc1',
+                },
+
+            ],
+            "created_at": 1700000000,
+        })
+
+        # upload-archives (newer release v4.35.0)
+        _run_and_validate_upload_archives(
+            metadata_file=f"{TEST_DATA_DIR_PATH}/dist/metadata_v4_35_0.json",
+            expected_keys={
+                "cow/manifest.json",
+                "cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz",
+                "cow/archives/4.35.0/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.35.0/redpanda-cow-linux-amd64.tar.gz",
+                "cow/archives/4.36.0-rc1/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.36.0-rc1/redpanda-cow-linux-amd64.tar.gz",
+            },
+        )
+        # verify that is_latest now points to v4.35.0
+        _run_and_validate_upload_manifests(expected_manifest={
+            "archives": [
+                {
+                    'artifacts': {
+                        'darwin-arm64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                        'linux-amd64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                    },
+                    'version': '4.34.0',
+                },
+                {
+                    'artifacts': {
+                        'darwin-arm64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.35.0/redpanda-cow-darwin-arm64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                        'linux-amd64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.35.0/redpanda-cow-linux-amd64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                    },
+                    'is_latest': True,
+                    'version': '4.35.0',
+                },
+                {
+                    'artifacts': {
+                        'darwin-arm64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.36.0-rc1/redpanda-cow-darwin-arm64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                        'linux-amd64': {
+                            'path': 'https://cow.farm.com/cow/archives/4.36.0-rc1/redpanda-cow-linux-amd64.tar.gz',
+                            'sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
+                        },
+                    },
+                    'version': '4.36.0-rc1',
+                },
+
+            ],
+            "created_at": 1700000000,
+        })
+
+
+if __name__ == "__main__":
+    unittest.main()