chore(main): Release transformation-k8s-compliance-premium v0.6.8 #2415

name: Validate Transformation
on:
  pull_request:
    branches:
      - main
env:
  SNOW_USER: ${{ secrets.SNOW_USER }}
  SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
  # DBT assumes the account is in the form of <account>.<region>
  SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
  SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
  SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
  SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
  SNOW_REGION: ${{ secrets.SNOW_REGION }}
  CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
jobs:
  prepare:
    runs-on: ubuntu-latest
    if: startsWith(github.head_ref, 'release-please--branches--main--components--transformation-')
    outputs:
      transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
      zipPath: ${{ fromJson(steps.set-result.outputs.result).zipPath }}
      postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
      snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
      bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
      s3: ${{ fromJson(steps.set-result.outputs.result).s3 }}
      clickhouse: ${{ fromJson(steps.set-result.outputs.result).clickhouse }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/github-script@v7
        id: set-result
        env:
          PR_BRANCH: ${{ github.head_ref }}
        with:
          script: |
            const path = require('path');
            const fs = require('fs/promises');
            const { PR_BRANCH } = process.env;
            const cleanBranch = PR_BRANCH.replace('release-please--branches--main--components--transformation-', '');
            const [topLevelDir, ...rest] = cleanBranch.split('-');
            const transformation_dir = `transformations/${topLevelDir}/${rest.join('-')}`;
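            // Worked example for this release PR (branch suffix "k8s-compliance-premium"):
            // topLevelDir = "k8s", rest = ["compliance", "premium"],
            // so transformation_dir = "transformations/k8s/compliance-premium".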
            const manifestFile = `${transformation_dir}/manifest.json`;
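            // manifest.json is assumed to expose a "path" key pointing at the packed
            // zip, relative to the transformation dir, e.g. {"path": "dist/transformation.zip"}
            // (illustrative value).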
            const zipPath = JSON.parse(await fs.readFile(manifestFile)).path;
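            // Probe which destination test configs exist; each check resolves to
            // true/false instead of throwing, so a missing file simply skips that target.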
            const [postgres, snowflake, bigquery, s3, clickhouse] = await Promise.all([
              fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/s3.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/clickhouse.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
            ]);
            return {
              transformation_dir,
              zipPath: path.resolve(transformation_dir, zipPath),
              postgres,
              snowflake,
              bigquery,
              s3,
              clickhouse,
            };
  validate-transformation:
    permissions:
      id-token: 'write'
      contents: 'read'
    needs: prepare
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:11
        env:
          POSTGRES_PASSWORD: pass
          POSTGRES_USER: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
        # Set health checks to wait until Postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Required for BigQuery targets
      - name: Authenticate to Google Cloud
        uses: 'google-github-actions/auth@v2'
        with:
          workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
          service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
      # Required for Athena targets
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::615713231484:role/cq-playground-aws-github-action
          aws-region: us-east-1
      - uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: "./${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
      - name: Install dependencies
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        run: pip install -r requirements.txt
      - name: Setup CloudQuery
        uses: cloudquery/setup-cloudquery@v4
        with:
          version: v6.11.0
      - name: Migrate DB Postgres
        if: needs.prepare.outputs.postgres == 'true'
        run: cloudquery migrate tests/postgres.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        env:
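          # DSN for the postgres service container defined above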
          CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
      - name: Migrate DB Snowflake
        if: needs.prepare.outputs.snowflake == 'true'
        run: cloudquery migrate tests/snowflake.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        env:
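          # Expected format: <user>:<password>@<account>.<region>/<database>/<schema>?warehouse=<warehouse>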
          SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
      - name: Migrate DB BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        run: cloudquery migrate tests/bigquery.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
      - name: Migrate DB Athena
        if: needs.prepare.outputs.s3 == 'true'
        run: cloudquery migrate tests/s3.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
      - name: Start ClickHouse Container
        if: needs.prepare.outputs.clickhouse == 'true'
        run: |
          docker run --platform linux/amd64 -d --name clickhouse-server --rm -p 8123:8123 -p 9000:9000 \
            -e CLICKHOUSE_PASSWORD=test \
            -e CLICKHOUSE_USER=cq \
            -e CLICKHOUSE_DB=cloudquery \
            -e CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 \
            -v ${{ github.workspace }}/.github/clickhouse.xml:/etc/clickhouse-server/users.d/cloudquery.xml \
            clickhouse/clickhouse-server:22.1.2
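          # Install wait-for-it and block until the ClickHouse native port (9000) accepts connections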
          sudo apt update && sudo apt install wait-for-it -y
          wait-for-it -h localhost -p 9000
      - name: Migrate DB ClickHouse
        if: needs.prepare.outputs.clickhouse == 'true'
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        env:
          CLICKHOUSE_CONNECTION_STRING: "clickhouse://cq:test@localhost:9000/cloudquery"
        run: cloudquery migrate tests/clickhouse.yml
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 'lts/*'
          cache: 'npm'
          cache-dependency-path: scripts/dbt-pack/package-lock.json
      - name: Install Dependencies
        run: npm ci
        working-directory: ./scripts/dbt-pack
      - name: Pack DBT
        working-directory: ./scripts/dbt-pack
        run: node index.js dbt-pack --project-dir=../../${{ needs.prepare.outputs.transformation_dir }}
      - name: Unzip Packed DBT
        run: |
          mkdir -p temp/build
          unzip -o ${{ needs.prepare.outputs.zipPath }} -d temp/build
          cp -r ${{ needs.prepare.outputs.transformation_dir }}/seeds/ temp/build
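      # The tests/ directory is assumed to double as the dbt profiles dir, i.e. it holds
      # a profiles.yml defining the dev-pg / dev-snowflake / dev-bigquery targets used
      # below. Illustrative dev-pg output matching the postgres service (example values):
      #   outputs:
      #     dev-pg:
      #       type: postgres
      #       host: localhost
      #       port: 5432
      #       user: postgres
      #       password: pass
      #       dbname: postgres
      #       schema: public
      #       threads: 1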
      - name: Run Unpacked DBT Postgres
        if: needs.prepare.outputs.postgres == 'true'
        working-directory: ./temp/build
        run: |
          dbt seed --target dev-pg --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
          dbt run --target dev-pg --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
      - name: Run Unpacked DBT Snowflake
        if: needs.prepare.outputs.snowflake == 'true'
        working-directory: ./temp/build
        run: |
          dbt seed --target dev-snowflake --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
          dbt run --target dev-snowflake --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
      - name: Run Unpacked DBT BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        working-directory: ./temp/build
        run: |
          dbt seed --target dev-bigquery --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
          dbt run --target dev-bigquery --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests