# e2e_spark.yaml (forked from volcano-sh/volcano)
name: E2E Spark Integration Test

on:
  # Post-submit check on the branches below
  push:
    branches:
      - master
  # PR-triggered job
  pull_request:
jobs:
  k8s-integration-tests:
    name: "E2E Spark integration test"
    runs-on: ubuntu-20.04
    steps:
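      # Check out the Volcano sources under test. github.event.inputs is empty for
      # push/pull_request triggers, so the `if` guard below always holds in this workflow.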
      - name: Checkout current Volcano repository
        if: github.event.inputs.volcano-branch == ''
        uses: actions/checkout@v3
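      # Check out Apache Spark (branch-3.4) into a subdirectory; its Kubernetes
      # integration test suite drives the E2E run below.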
      - name: Checkout Spark repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          repository: apache/spark
          ref: branch-3.4
          path: ${{ github.workspace }}/spark
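      # Cache the build tooling that Spark's build scripts download (Maven, Scala, sbt launchers).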
      - name: Cache Scala, SBT and Maven
        uses: actions/cache@v3
        with:
          path: |
            build/apache-maven-*
            build/scala-*
            build/*.jar
            ~/.sbt
          key: build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
          restore-keys: |
            build-
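      # Cache Coursier's local repository so sbt dependency resolution is faster on reruns.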
      - name: Cache Coursier local repository
        uses: actions/cache@v3
        with:
          path: ~/.cache/coursier
          key: k8s-integration-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
          restore-keys: |
            k8s-integration-coursier-
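      # The Spark build in this job runs on Temurin JDK 8.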
      - name: Install Java 8
        uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 8
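      # Go is required by the Volcano make targets used further down.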
      - name: Install Go
        uses: actions/setup-go@v4
        with:
          go-version: 1.22.x
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
      - name: Start minikube
        run: |
          # Use the minikube pre-installed on the runner
          minikube start --cpus max --memory max
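      # Build the Volcano images inside minikube's Docker daemon so the cluster can use
      # them locally (imagePullPolicy is switched to IfNotPresent), then deploy the
      # development manifest.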
      - name: Build latest Volcano images
        run: |
          eval $(minikube docker-env)
          make TAG=latest update-development-yaml
          sed -i 's/imagePullPolicy: Always/imagePullPolicy: IfNotPresent/g' installer/volcano-development.yaml
          make TAG=latest images
          docker images | grep volcano
          grep image: ./installer/volcano-development.yaml
          minikube kubectl -- replace --force -f ./installer/volcano-development.yaml
      - name: Show all K8S pods and nodes
        run: |
          # Use minikube's bundled kubectl
          minikube kubectl -- get pods -A
          minikube kubectl -- get nodes -oyaml
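      # Run only the Volcano-tagged Spark Kubernetes integration tests via sbt, with small
      # driver/executor CPU requests (0.5/0.2) sized for the single minikube node.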
      - name: Run K8S integration test
        run: |
          eval $(minikube docker-env)
          minikube kubectl -- create clusterrolebinding serviceaccounts-cluster-admin --clusterrole=cluster-admin --group=system:serviceaccounts || true
          build/sbt -Pvolcano -Pkubernetes -Pkubernetes-integration-tests -Dspark.kubernetes.test.driverRequestCores=0.5 -Dspark.kubernetes.test.executorRequestCores=0.2 -Dspark.kubernetes.test.volcanoMaxConcurrencyJobNum=1 -Dtest.include.tags=volcano "kubernetes-integration-tests/test"
        working-directory: ${{ github.workspace }}/spark
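      # On failure, dump the Volcano scheduler, admission, and controller logs plus
      # cluster state for debugging.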
      - name: Collect Volcano logs
        if: failure()
        run: |
          kubectl logs $(kubectl get po -n volcano-system | grep volcano-scheduler | cut -d" " -f1) -n volcano-system > volcano-scheduler.log
          kubectl logs $(kubectl get po -n volcano-system | grep volcano-admission | cut -d" " -f1 | grep -v init) -n volcano-system > volcano-admission.log
          kubectl logs $(kubectl get po -n volcano-system | grep volcano-controllers | cut -d" " -f1) -n volcano-system > volcano-controllers.log
          kubectl get pod -A
          kubectl describe node
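      # Attach the Spark integration-test log and the Volcano logs collected above as a
      # build artifact.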
      - name: Upload Spark on K8S integration tests log files
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: spark-on-kubernetes-it-log
          path: |
            **/target/integration-tests.log
            **/volcano-*.log
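      # Always tear the minikube cluster down, even if earlier steps failed.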
      - name: Cleanup minikube
        if: always()
        run: |
          minikube delete