-
Notifications
You must be signed in to change notification settings - Fork 0
162 lines (160 loc) · 7.35 KB
/
validate_transformation_release.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
---
# Validates a transformation package when a release-please release branch
# for a transformation opens a PR against main.
name: Validate Transformation
on:
  pull_request:
    branches:
      - main
# Snowflake / CloudQuery credentials shared by all jobs.
env:
  SNOW_USER: ${{ secrets.SNOW_USER }}
  SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
  # DBT assumes the account is in the form of <account>.<region>
  SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
  SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
  SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
  SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
  SNOW_REGION: ${{ secrets.SNOW_REGION }}
  CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
jobs:
  # Parses the release-please branch name into the transformation directory,
  # reads the packed-zip path from its manifest.json, and probes which
  # destination test configs (postgres/snowflake/bigquery/s3) exist.
  prepare:
    runs-on: ubuntu-latest
    # Only run for release-please transformation release branches.
    if: startsWith(github.head_ref, 'release-please--branches--main--components--transformation-')
    outputs:
      transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
      zipPath: ${{ fromJson(steps.set-result.outputs.result).zipPath }}
      postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
      snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
      bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
      s3: ${{ fromJson(steps.set-result.outputs.result).s3 }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/github-script@v7
        id: set-result
        env:
          PR_BRANCH: ${{ github.head_ref }}
        with:
          script: |
            const path = require('path');
            const fs = require('fs/promises');
            const { PR_BRANCH } = process.env;
            const cleanBranch = PR_BRANCH.replace('release-please--branches--main--components--transformation-', '');
            const [topLevelDir, ...rest] = cleanBranch.split('-');
            const transformation_dir = `transformations/${topLevelDir}/${rest.join('-')}`;
            const manifestFile = `${transformation_dir}/manifest.json`;
            const zipPath = JSON.parse(await fs.readFile(manifestFile)).path;
            const [postgres, snowflake, bigquery, s3] = await Promise.all([
              fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/s3.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
            ]);
            return {
              transformation_dir,
              zipPath: path.resolve(transformation_dir, zipPath),
              postgres,
              snowflake,
              bigquery,
              s3
            };
  # Packs the dbt project, then runs migrate + seed + run against every
  # destination whose test config was detected by `prepare`.
  # Skipped automatically (via `needs`) when `prepare` is skipped.
  validate-transformation:
    permissions:
      id-token: 'write'
      contents: 'read'
    needs: prepare
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:11
        env:
          POSTGRES_PASSWORD: pass
          POSTGRES_USER: postgres
          POSTGRES_DB: postgres
        ports:
          # Quoted to keep the port mapping a string, not a scalar-typing trap.
          - "5432:5432"
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Required for BigQuery targets
      - name: Authenticate to Google Cloud
        uses: 'google-github-actions/auth@v2'
        with:
          workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
          service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
      # Required for athena targets
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::615713231484:role/cq-playground-aws-github-action
          aws-region: us-east-1
      - uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: "./${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
      - name: Install dependencies
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        run: pip install -r requirements.txt
      - name: Setup CloudQuery
        uses: cloudquery/setup-cloudquery@v4
        with:
          version: v6.4.1
      # Job outputs are strings, hence the == 'true' comparisons below.
      - name: Migrate DB Postgres
        if: needs.prepare.outputs.postgres == 'true'
        run: cloudquery migrate tests/postgres.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        env:
          CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
      - name: Migrate DB Snowflake
        if: needs.prepare.outputs.snowflake == 'true'
        run: cloudquery migrate tests/snowflake.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
        env:
          SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
      - name: Migrate DB BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        run: cloudquery migrate tests/bigquery.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
      # NOTE(review): the s3/Athena target is migrated here but has no
      # matching "Run Unpacked DBT" step below — confirm that is intentional.
      - name: Migrate DB Athena
        if: needs.prepare.outputs.s3 == 'true'
        run: cloudquery migrate tests/s3.yml
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 'lts/*'
          cache: 'npm'
          cache-dependency-path: scripts/dbt-pack/package-lock.json
      - name: Install Dependencies
        run: npm ci
        working-directory: ./scripts/dbt-pack
      - name: Pack DBT
        working-directory: ./scripts/dbt-pack
        run: node index.js dbt-pack --project-dir=../../${{ needs.prepare.outputs.transformation_dir }}
      - name: Unzip Packed DBT
        run: |
          mkdir -p temp/build
          unzip -o ${{ needs.prepare.outputs.zipPath }} -d temp/build
          cp -r ${{ needs.prepare.outputs.transformation_dir }}/seeds/ temp/build
      - name: Run Unpacked DBT Postgres
        if: needs.prepare.outputs.postgres == 'true'
        working-directory: ./temp/build
        run: |
          dbt seed --target dev-pg --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
          dbt run --target dev-pg --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
      - name: Run Unpacked DBT Snowflake
        if: needs.prepare.outputs.snowflake == 'true'
        working-directory: ./temp/build
        run: |
          dbt seed --target dev-snowflake --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
          dbt run --target dev-snowflake --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
      - name: Run Unpacked DBT BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        working-directory: ./temp/build
        run: |
          dbt seed --target dev-bigquery --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests
          dbt run --target dev-bigquery --profiles-dir ../../${{ needs.prepare.outputs.transformation_dir }}/tests