@@ -184,104 +184,104 @@ jobs:
        env:
          CR_TOKEN: '${{ secrets.GITHUB_TOKEN }}'

-  test-otomi-release:
-    name: Test Helm Chart Installation
-    needs: [release, chart-release]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Use Scaleway CLI
-        uses: scaleway/action-scw@v0
-        with:
-          save-config: true
-          export-config: true
-          version: v2.26.0
-          access-key: ${{ secrets.SCW_ACCESS_KEY }}
-          secret-key: ${{ secrets.SCW_SECRET_KEY }}
-          default-project-id: ${{ secrets.SCW_DEFAULT_PROJECT_ID }}
-          default-organization-id: ${{ secrets.SCW_DEFAULT_ORGANIZATION_ID }}
-      - name: Pulling the helm chart
-        run: |
-          # Install and update helm repo
-          helm repo add apl https://linode.github.io/apl-core
-          helm repo update
+  # test-otomi-release:
+  #   name: Test Helm Chart Installation
+  #   needs: [release, chart-release]
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - name: Use Scaleway CLI
+  #       uses: scaleway/action-scw@v0
+  #       with:
+  #         save-config: true
+  #         export-config: true
+  #         version: v2.26.0
+  #         access-key: ${{ secrets.SCW_ACCESS_KEY }}
+  #         secret-key: ${{ secrets.SCW_SECRET_KEY }}
+  #         default-project-id: ${{ secrets.SCW_DEFAULT_PROJECT_ID }}
+  #         default-organization-id: ${{ secrets.SCW_DEFAULT_ORGANIZATION_ID }}
+  #     - name: Pulling the helm chart
+  #       run: |
+  #         # Install and update helm repo
+  #         helm repo add apl https://linode.github.io/apl-core
+  #         helm repo update

-          # Get latest version of otomi
-          latest_version=$(helm search repo otomi -l | grep -m 1 otomi | awk '{print $2}')
-          echo The latest version to be tested is: $latest_version
-      - name: Creating the cluster
-        run: |
-          # Create cluster private network and get ID
-          SCALEWAY_PRIVATE_NETWORK_ID=$(scw vpc private-network create project-id=${{ secrets.SCW_DEFAULT_PROJECT_ID }} name='otomi-test-release' region=nl-ams -ojson | jq -r .id)
+  #         # Get latest version of otomi
+  #         latest_version=$(helm search repo otomi -l | grep -m 1 otomi | awk '{print $2}')
+  #         echo The latest version to be tested is: $latest_version
+  #     - name: Creating the cluster
+  #       run: |
+  #         # Create cluster private network and get ID
+  #         SCALEWAY_PRIVATE_NETWORK_ID=$(scw vpc private-network create project-id=${{ secrets.SCW_DEFAULT_PROJECT_ID }} name='otomi-test-release' region=nl-ams -ojson | jq -r .id)

-          # Get k8s 1.27 patch version
-          K8s_VERSION=$(scw k8s version list -o json | jq -ce '.[] | .name' -r | grep 1.27)
+  #         # Get k8s 1.27 patch version
+  #         K8s_VERSION=$(scw k8s version list -o json | jq -ce '.[] | .name' -r | grep 1.27)

-          # Create cluster
-          scw k8s cluster create \
-            name=otomi-test-release \
-            pools.0.node-type=PRO2-M \
-            private-network-id=$SCALEWAY_PRIVATE_NETWORK_ID \
-            auto-upgrade.enable=false \
-            cni=calico \
-            pools.0.name=otomi-test-release \
-            pools.0.size=3 \
-            pools.0.max-size=3 \
-            pools.0.autohealing=true \
-            pools.0.autoscaling=true \
-            pools.0.root-volume-size=50GB \
-            version=$K8s_VERSION \
-            region=nl-ams \
-            project-id=${{ secrets.SCW_DEFAULT_PROJECT_ID }} \
-            --wait
-          echo "Cluster deployed successfully"
-      - name: Installing new otomi release
-        run: |
-          # Get cluster ID and set env var
-          cluster_id=$(scw k8s cluster list region=nl-ams -o json | jq -r '.[] | select(.name == "otomi-test-release") | .id')
-          echo "Cluster ID: $cluster_id"
-          echo SCALEWAY_CLUSTER_ID=$cluster_id >> $GITHUB_ENV
+  #         # Create cluster
+  #         scw k8s cluster create \
+  #           name=otomi-test-release \
+  #           pools.0.node-type=PRO2-M \
+  #           private-network-id=$SCALEWAY_PRIVATE_NETWORK_ID \
+  #           auto-upgrade.enable=false \
+  #           cni=calico \
+  #           pools.0.name=otomi-test-release \
+  #           pools.0.size=3 \
+  #           pools.0.max-size=3 \
+  #           pools.0.autohealing=true \
+  #           pools.0.autoscaling=true \
+  #           pools.0.root-volume-size=50GB \
+  #           version=$K8s_VERSION \
+  #           region=nl-ams \
+  #           project-id=${{ secrets.SCW_DEFAULT_PROJECT_ID }} \
+  #           --wait
+  #         echo "Cluster deployed successfully"
+  #     - name: Installing new otomi release
+  #       run: |
+  #         # Get cluster ID and set env var
+  #         cluster_id=$(scw k8s cluster list region=nl-ams -o json | jq -r '.[] | select(.name == "otomi-test-release") | .id')
+  #         echo "Cluster ID: $cluster_id"
+  #         echo SCALEWAY_CLUSTER_ID=$cluster_id >> $GITHUB_ENV

-          # Get kubeconfig
-          scw k8s kubeconfig install $cluster_id region=nl-ams
-          echo "Kubeconfig installed successfully"
+  #         # Get kubeconfig
+  #         scw k8s kubeconfig install $cluster_id region=nl-ams
+  #         echo "Kubeconfig installed successfully"

-          # Update values.yaml integration test file
-          SCALEWAY_CLUSTER_CONTEXT=`kubectl config current-context`
+  #         # Update values.yaml integration test file
+  #         SCALEWAY_CLUSTER_CONTEXT=`kubectl config current-context`

-          # Install otomi
-          helm install otomi otomi/otomi \
-            --wait --wait-for-jobs --timeout 30m0s \
-            --set cluster.provider=scaleway \
-            --set cluster.name=otomi-test-release \
-            --set cluster.k8sContext=$SCALEWAY_CLUSTER_CONTEXT
-      - name: Gather k8s events on failure
-        if: failure()
-        run: |
-          kubectl get events --sort-by='.lastTimestamp' -A
-      - name: Gather k8s pods on failure
-        if: failure()
-        run: |
-          kubectl get pods -A -o wide
-      - name: Gather otomi logs on failure
-        if: failure()
-        run: |
-          kubectl logs jobs/otomi --tail 150
-      - name: Delete k8s cluster at Scaleway
-        if: always()
-        run: |
-          scw k8s cluster delete ${{ env.SCALEWAY_CLUSTER_ID }} with-additional-resources=true region=nl-ams
+  #         # Install otomi
+  #         helm install otomi otomi/otomi \
+  #           --wait --wait-for-jobs --timeout 30m0s \
+  #           --set cluster.provider=scaleway \
+  #           --set cluster.name=otomi-test-release \
+  #           --set cluster.k8sContext=$SCALEWAY_CLUSTER_CONTEXT
+  #     - name: Gather k8s events on failure
+  #       if: failure()
+  #       run: |
+  #         kubectl get events --sort-by='.lastTimestamp' -A
+  #     - name: Gather k8s pods on failure
+  #       if: failure()
+  #       run: |
+  #         kubectl get pods -A -o wide
+  #     - name: Gather otomi logs on failure
+  #       if: failure()
+  #       run: |
+  #         kubectl logs jobs/otomi --tail 150
+  #     - name: Delete k8s cluster at Scaleway
+  #       if: always()
+  #       run: |
+  #         scw k8s cluster delete ${{ env.SCALEWAY_CLUSTER_ID }} with-additional-resources=true region=nl-ams

-  notification:
-    needs: [build-test-cache, push-to-docker, release, chart-release]
-    if: always()
-    runs-on: ubuntu-latest
-    steps:
-      - name: Slack Notification
-        uses: rtCamp/action-slack-notify@v2
-        env:
-          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
-          SLACK_CHANNEL: github-ci
-          SLACK_COLOR: ${{ job.status }}
-          SLACK_ICON: https://github.com/redkubes.png?size=48
-          SLACK_TITLE: CI run
-          SLACK_USERNAME: RedKubesBot
+  # notification:
+  #   needs: [build-test-cache, push-to-docker, release, chart-release]
+  #   if: always()
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - name: Slack Notification
+  #       uses: rtCamp/action-slack-notify@v2
+  #       env:
+  #         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
+  #         SLACK_CHANNEL: github-ci
+  #         SLACK_COLOR: ${{ job.status }}
+  #         SLACK_ICON: https://github.com/redkubes.png?size=48
+  #         SLACK_TITLE: CI run
+  #         SLACK_USERNAME: RedKubesBot