# test.yml
name: Scrapyd-k8s CI
on:
  push:
    branches:
      - main
  pull_request:
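
# The jobs below cover four levels of testing: plain unit tests, integration
# tests against a local Docker daemon, integration tests against the container
# deployed into minikube via kubernetes.yaml, and integration tests against
# minikube with scrapyd-k8s running on the runner itself.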
jobs:
  test-unit:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'

      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Run tests
        run: pytest -vv --color=yes scrapyd_k8s/tests/unit/
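
  # A rough local equivalent of the unit-test job above (assuming Python 3.11
  # and the repository root as working directory) would be:
  #   pip install -r requirements.txt -r requirements-test.txt
  #   pytest -vv --color=yes scrapyd_k8s/tests/unit/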

  test-docker:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'

      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Pull example spider
        run: docker pull ghcr.io/q-m/scrapyd-k8s-spider-example

      - name: Run scrapyd-k8s
        run: |
          cp scrapyd_k8s.sample-docker.conf scrapyd_k8s.conf
          python -m scrapyd_k8s &
          while ! nc -q 1 localhost 6800 </dev/null; do sleep 1; done
          curl http://localhost:6800/daemonstatus.json

      - name: Run tests
        run: pytest -vv --color=yes scrapyd_k8s/tests/integration/
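
  # A rough local equivalent of the Docker integration test above (assuming a
  # running Docker daemon and the Python dependencies already installed):
  #   docker pull ghcr.io/q-m/scrapyd-k8s-spider-example
  #   cp scrapyd_k8s.sample-docker.conf scrapyd_k8s.conf
  #   python -m scrapyd_k8s &
  #   curl http://localhost:6800/daemonstatus.json   # repeat until it responds
  #   pytest -vv --color=yes scrapyd_k8s/tests/integration/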

  test-manifest:
    container:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'

      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build container
        uses: docker/build-push-action@v5
        with:
          context: .
          push: false
          load: true
          tags: test:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Start minikube
        uses: medyagh/setup-minikube@master

      - name: Deploy to minikube
        run: |
          minikube image load test:latest
          # pre-pull the spider image so we don't have to wait for it later
          minikube image pull ghcr.io/q-m/scrapyd-k8s-spider-example:latest
          # adapt the manifest for the locally built image and NodePort access, then load it
          sed -i 's/\(imagePullPolicy:\s*\)\w\+/\1Never/' kubernetes.yaml
          sed -i 's/\(image:\s*\)ghcr\.io\/q-m\/scrapyd-k8s:/\1test:/' kubernetes.yaml
          sed -i 's/\(type:\s*\)ClusterIP/\1NodePort/' kubernetes.yaml
          kubectl create -f kubernetes.yaml
          # and wait for scrapyd-k8s to become ready
          kubectl wait --for=condition=Available deploy/scrapyd-k8s --timeout=60s
          curl --retry 10 --retry-delay 2 --retry-all-errors `minikube service scrapyd-k8s --url`/daemonstatus.json

      - name: Run tests
        run: |
          TEST_WITH_K8S=1 \
          TEST_BASE_URL=`minikube service scrapyd-k8s --url` \
          TEST_MAX_WAIT=60 \
          TEST_AVAILABLE_VERSIONS=latest,`skopeo list-tags docker://ghcr.io/q-m/scrapyd-k8s-spider-example | jq -r '.Tags | map(select(. != "latest" and (startswith("sha-") | not))) | join(",")'` \
          pytest -vv --color=yes scrapyd_k8s/tests/integration/
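
  # TEST_AVAILABLE_VERSIONS above is derived from the container registry: skopeo
  # lists all tags of the example spider image and jq drops "latest" and any
  # "sha-*" build tags, so the value looks like "latest,<tag>,<tag>" with the
  # exact tags depending on the registry at run time.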

  test-k8s:
    container:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'

      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          pip install -r requirements-test.txt

      - name: Start minikube
        uses: medyagh/setup-minikube@master

      - name: Prepare Kubernetes environment
        run: |
          kubectl create secret generic example-env-secret --from-literal=FOO_1=bar
          kubectl create configmap example-env-configmap --from-literal=FOO_2=baz
          # pre-pull the spider image so we don't have to wait for it later
          minikube image pull ghcr.io/q-m/scrapyd-k8s-spider-example:latest

      - name: Run scrapyd-k8s
        run: |
          cp scrapyd_k8s.sample-k8s.conf scrapyd_k8s.conf
          python -m scrapyd_k8s &
          while ! nc -q 1 localhost 6800 </dev/null; do sleep 1; done
          curl http://localhost:6800/daemonstatus.json

      - name: Run tests
        run: |
          TEST_WITH_K8S=1 \
          TEST_MAX_WAIT=60 \
          TEST_AVAILABLE_VERSIONS=latest,`skopeo list-tags docker://ghcr.io/q-m/scrapyd-k8s-spider-example | jq -r '.Tags | map(select(. != "latest" and (startswith("sha-") | not))) | join(",")'` \
          pytest -vv --color=yes scrapyd_k8s/tests/integration/
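
# Note: test-manifest exercises the container image deployed into minikube via
# kubernetes.yaml, whereas test-k8s runs scrapyd-k8s directly on the runner
# (using scrapyd_k8s.sample-k8s.conf) against the minikube cluster; both run the
# same integration test suite.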