From dd2263a7b83f1744a7cab71e014e22295f498f29 Mon Sep 17 00:00:00 2001
From: illuminatus
Date: Tue, 3 Oct 2023 00:44:53 +0200
Subject: [PATCH 1/2] Delete .github/workflows/test-workflow.yml

---
 .github/workflows/test-workflow.yml | 4 ----
 1 file changed, 4 deletions(-)
 delete mode 100644 .github/workflows/test-workflow.yml

diff --git a/.github/workflows/test-workflow.yml b/.github/workflows/test-workflow.yml
deleted file mode 100644
index b5a20a5..0000000
--- a/.github/workflows/test-workflow.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-name: Test Workflow
-
-on:
-  workflow_dispatch:

From 63c179333cbf9ef40b396bcff2bee8722f88dba8 Mon Sep 17 00:00:00 2001
From: illuminatus
Date: Mon, 15 Apr 2024 15:12:06 -0700
Subject: [PATCH 2/2] PTFE-1366 - Migrate registry containers for spark to
 ghcr.io (#52)

* Migrate to ghcr.io

s3utils 1.14.0 is not available on ghcr.io, so bump to s3utils:1.14.6.
The next installer build will pick up the most current s3utils fixes.

* Apply review suggestion

Co-authored-by: Thomas Carmet <8408330+tcarmet@users.noreply.github.com>

* Apply review suggestion

Co-authored-by: Thomas Carmet <8408330+tcarmet@users.noreply.github.com>

* Apply review suggestion

Co-authored-by: Thomas Carmet <8408330+tcarmet@users.noreply.github.com>

---------

Co-authored-by: Thomas Carmet <8408330+tcarmet@users.noreply.github.com>
---
 .github/workflows/release.yml                 |  8 ++-
 ansible/Readme.md                             | 50 +++++++++++--------
 ansible/deploy-spark-scripts.yml              |  4 +-
 ansible/roles/run-spark-cluster/vars/main.yml |  6 +--
 scripts/S3_FSCK/README.md                     |  6 +--
 scripts/S3_FSCK/export_s3_keys.sh             |  2 +-
 6 files changed, 41 insertions(+), 35 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 5dd2e79..563dd0c 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -36,13 +36,11 @@ on:
 run-name: Release ${{ inputs.tag }}
 
 env:
-  image_path: spark-dev/deployment
   staging_path: ${{ github.workspace }}/staging
-  registry: registry.scality.com
+  registry: ghcr.io
   img: fieldreportservice
-  image: registry.scality.com/spark-dev/deployment
   staging_archive: spark-offline-archive-${{ inputs.tag }}${{ inputs.prerelease == true && '-prerelease' || '' }}.run
-  s3utils_image: registry.scality.com/s3utils/s3utils:1.14.0
+  s3utils_image: ghcr.io/scality/s3utils:1.14.6
   nginx_image: docker.io/library/nginx:1.21.6-alpine
 
 jobs:
@@ -66,7 +64,7 @@ jobs:
       - name: Registry Login
         uses: docker/login-action@v2.1.0
         with:
-          registry: registry.scality.com
+          registry: ghcr.io
           username: ${{ secrets.REGISTRY_LOGIN }}
           password: ${{ secrets.REGISTRY_PASSWORD }}
       - name: Ensure makeself is installed
diff --git a/ansible/Readme.md b/ansible/Readme.md
index cd7e54e..3ff0c29 100644
--- a/ansible/Readme.md
+++ b/ansible/Readme.md
@@ -109,7 +109,7 @@ Requirements:
 * Ansible Galaxy community.general collection
 * A host in the ``[staging]`` group with internet access
 * SSH Agent/Keys which provide access to the Scality GitHub repository
-* Defining the registry.scality.com credentials in inventory or command line
+* Defining the ghcr.io credentials in inventory or command line
 
 When registry_user and registry_password (lowercase) Ansible variables
 are defined in the inventory file:
@@ -133,8 +133,8 @@ the container host.
 * Use an SSH Agent with the SSH key added that can connect to the
   inventory host in the `[staging]` group
-* Set the REGISTRY_USER variable to match a registry.scality.com API user
-* Set the REGISTRY_PASSWORD variable to matching API password
+* Set the REGISTRY_USER variable to your GitHub username.
+* Set the REGISTRY_PASSWORD variable to a [GitHub PAT](https://github.com/settings/tokens) with the `read:packages` scope.
 
 When using docker set environment variables in all upper case. Inside the
 inventory file you will see the same variables in all lower case.
@@ -147,19 +147,27 @@ Agent and/or Keys to the container.
 
 1. Pulling the spark-deployment image from registry
 
    ```commandline
-   [docker|podman] pull registry.scality.com/spark/spark-deployment:latest
+   [docker|podman] pull ghcr.io/scality/spark/spark-deployment:latest
    ```
 
-2. Build the spark-deployment image
-
-   ```commandline
-   cd spark/ansible
-   [docker|podman] build . -f Containerfile -t registry.scality.com/spark/spark-deployment:latest
-   ```
+2. The spark-deployment image
+
+   * Pull a published image if there are no changes to spark/ansible
+
+     ```commandline
+     [docker|podman] pull ghcr.io/scality/spark/spark-deployment:latest
+     ```
+
+   * Build the image from scratch if spark/ansible has been modified
+
+     ```commandline
+     cd spark/ansible
+     [docker|podman] build . -f Containerfile -t ghcr.io/scality/spark/spark-deployment:latest
+     ```
 
-3. Using Podman generate the offline archive
+3. Generate the offline archive
 
 ```commandline
-podman run --privileged \
+[docker|podman] run --privileged \
     --rm \
     --net host \
    -i -t \
     -e "SSH_AUTH_SOCK=/ssh-agent" \
     --volume ${SSH_AUTH_SOCK}:/ssh-agent \
     -e "REGISTRY_USER=User_Name" \
     -e "REGISTRY_PASSWORD=" \
     -v ~/.ssh:/ansible/.ssh:rw \
-    registry.scality.com/spark/spark-deployment:latest \
+    ghcr.io/scality/spark/spark-deployment:latest \
     stage
 ```
 
 skipping: [localhost]
 
 TASK [stage-spark-cluster : Archive the spark repository into the staging directory] ***************************************************
 changed: [localhost]
 
-TASK [stage-spark-cluster : Login to the registry registry.scality.com] ****************************************************************
+TASK [stage-spark-cluster : Login to the registry ghcr.io] *****************************************************************************
 changed: [localhost]
 
-TASK [stage-spark-cluster : Pull containers from registry registry.scality.com] ********************************************************
-changed: [localhost] => (item=registry.scality.com/spark/spark-master:latest)
-changed: [localhost] => (item=registry.scality.com/spark/spark-worker:latest)
-changed: [localhost] => (item=registry.scality.com/s3utils/s3utils:1.12.5)
+TASK [stage-spark-cluster : Pull containers from registry ghcr.io] *********************************************************************
+changed: [localhost] => (item=ghcr.io/scality/spark/spark-master:latest)
+changed: [localhost] => (item=ghcr.io/scality/spark/spark-worker:latest)
+changed: [localhost] => (item=ghcr.io/scality/s3utils:1.14.6)
 
 TASK [stage-spark-cluster : Save the images into the staging directory] ****************************************************************
-changed: [localhost] => (item=registry.scality.com/spark/spark-master:latest)
-changed: [localhost] => (item=registry.scality.com/spark/spark-worker:latest)
-changed: [localhost] => (item=registry.scality.com/s3utils/s3utils:1.12.5)
+changed: [localhost] => (item=ghcr.io/scality/spark/spark-master:latest)
+changed: [localhost] => (item=ghcr.io/scality/spark/spark-worker:latest)
+changed: [localhost] => (item=ghcr.io/scality/s3utils:1.14.6)
 
 TASK [stage-spark-cluster : Generate setup.sh for makeself] ****************************************************************************
 changed: [localhost]
diff --git a/ansible/deploy-spark-scripts.yml b/ansible/deploy-spark-scripts.yml
index d236cc8..8cc09f3 100644
--- a/ansible/deploy-spark-scripts.yml
+++ b/ansible/deploy-spark-scripts.yml
@@ -1,8 +1,8 @@
 ---
 - hosts: sparkmaster:sparkworkers
   vars:
-    sparkmaster_image: registry.scality.com/scality/spark/spark-master:latest
-    sparkworker_image: registry.scality.com/scality/spark/spark-worker:latest
+    sparkmaster_image: ghcr.io/scality/spark/spark-master:latest
+    sparkworker_image: ghcr.io/scality/spark/spark-worker:latest
   tasks:
     - name: Deploy the spark_start.sh script
       template:
diff --git a/ansible/roles/run-spark-cluster/vars/main.yml b/ansible/roles/run-spark-cluster/vars/main.yml
index 8dd6408..bc76649 100644
--- a/ansible/roles/run-spark-cluster/vars/main.yml
+++ b/ansible/roles/run-spark-cluster/vars/main.yml
@@ -7,7 +7,7 @@ container_name: 'scality-spark-worker'
 
-master_container_image: registry.scality.com/spark/spark-master:latest
-worker_container_image: registry.scality.com/spark/spark-worker:latest
-s3utils_container_image: registry.scality.com/s3utils/s3utils:1.12.5
+master_container_image: ghcr.io/scality/spark/spark-master:latest
+worker_container_image: ghcr.io/scality/spark/spark-worker:latest
+s3utils_container_image: ghcr.io/scality/s3utils:1.14.6
 nginx_container_image: docker.io/library/nginx:1.21.6-alpine
diff --git a/scripts/S3_FSCK/README.md b/scripts/S3_FSCK/README.md
index decfee8..80f32c2 100644
--- a/scripts/S3_FSCK/README.md
+++ b/scripts/S3_FSCK/README.md
@@ -144,10 +144,10 @@ arc_protection: 8+4
 
 ### Primary method
 
-* Pull the s3utils:1.12.5 or newer container image.
+* Pull the s3utils:1.14.6 or newer container image.
 
 ```
-# docker pull registry.scality.com/s3utils/s3utils:1.12.5
+# docker pull ghcr.io/scality/s3utils:1.14.6
 ```
 
 ### Alternative method
@@ -223,7 +223,7 @@ do
     -e "BUCKETS=${bucket}" \
     -e 'NO_MISSING_KEY_CHECK=1' \
     -e 'VERBOSE=1' \
-    scality/s3utils:1.12.5 \
+    ghcr.io/scality/s3utils:1.14.6 \
     verifyBucketSproxydKeys.js \
     | jq -r "[. | select(.message | contains(\"sproxyd key\")) + {\"bucket\": .objectUrl } | .bucket |= sub(\"s3://(?<bname>.*)/.*\"; \"\(.bname)\") | .objectUrl |= sub(\"s3://.*/(?<oname>.*)$\"; \"\(.oname)\") | .bucket, .objectUrl, .sproxydKey] | @csv" \
     > ${WORKDIR}/${bucket}_keys.txt
diff --git a/scripts/S3_FSCK/export_s3_keys.sh b/scripts/S3_FSCK/export_s3_keys.sh
index 718d51c..c0541f0 100644
--- a/scripts/S3_FSCK/export_s3_keys.sh
+++ b/scripts/S3_FSCK/export_s3_keys.sh
@@ -21,7 +21,7 @@ do
     -e "BUCKETS=${bucket}" \
     -e 'NO_MISSING_KEY_CHECK=1' \
     -e 'VERBOSE=1' \
-    registry.scality.com/s3utils/s3utils:1.14.0 \
+    ghcr.io/scality/s3utils:1.14.6 \
     verifyBucketSproxydKeys.js \
     > ${WORKDIR}/raw_${bucket}_keys.txt
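
To sanity-check the migration after applying this series, one can log in to ghcr.io and pull each image the workflows and playbooks now reference. A minimal sketch, assuming docker is installed and that GITHUB_USER and GHCR_PAT are hypothetical environment variables holding your GitHub username and a PAT with the `read:packages` scope:

```commandline
# GITHUB_USER and GHCR_PAT are placeholder names, not variables used by this repo.
echo "${GHCR_PAT}" | docker login ghcr.io -u "${GITHUB_USER}" --password-stdin

# Pull every image reference introduced by this series.
docker pull ghcr.io/scality/spark/spark-deployment:latest
docker pull ghcr.io/scality/spark/spark-master:latest
docker pull ghcr.io/scality/spark/spark-worker:latest
docker pull ghcr.io/scality/s3utils:1.14.6
docker pull docker.io/library/nginx:1.21.6-alpine
```

A "denied" or "manifest unknown" error on any of these pulls typically indicates a stale registry.scality.com reference or tag this series missed.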