diff --git a/contrib/spark/Makefile b/contrib/spark/Makefile index 9f0767df8..0a1d2987e 100644 --- a/contrib/spark/Makefile +++ b/contrib/spark/Makefile @@ -1,4 +1,4 @@ -SPARK_OPERATOR_RELEASE_VERSION ?= 2.0.1 +SPARK_OPERATOR_RELEASE_VERSION ?= 2.0.2 SPARK_OPERATOR_HELM_CHART_REPO ?= https://kubeflow.github.io/spark-operator .PHONY: spark-operator/base diff --git a/contrib/spark/spark-operator/base/resources.yaml b/contrib/spark/spark-operator/base/resources.yaml index 39ecc7e07..3bace9443 100644 --- a/contrib/spark/spark-operator/base/resources.yaml +++ b/contrib/spark/spark-operator/base/resources.yaml @@ -1,5 +1,5 @@ --- -# Source: spark-operator/crds/sparkoperator.k8s.io_scheduledsparkapplications.yaml +# Source: crds/sparkoperator.k8s.io_scheduledsparkapplications.yaml --- apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition @@ -9607,8 +9607,8 @@ spec: additionalProperties: type: string description: |- - HadoopConf carries user-specified Hadoop configuration properties as they would use the the "--conf" option - in spark-submit. The SparkApplication controller automatically adds prefix "spark.hadoop." to Hadoop + HadoopConf carries user-specified Hadoop configuration properties as they would use the "--conf" option + in spark-submit. The SparkApplication controller automatically adds prefix "spark.hadoop." to Hadoop configuration properties. type: object hadoopConfigMap: @@ -11624,7 +11624,7 @@ spec: status: {} --- -# Source: spark-operator/crds/sparkoperator.k8s.io_sparkapplications.yaml +# Source: crds/sparkoperator.k8s.io_sparkapplications.yaml --- apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition @@ -21141,8 +21141,8 @@ spec: additionalProperties: type: string description: |- - HadoopConf carries user-specified Hadoop configuration properties as they would use the the "--conf" option - in spark-submit. The SparkApplication controller automatically adds prefix "spark.hadoop." to Hadoop + HadoopConf carries user-specified Hadoop configuration properties as they would use the "--conf" option + in spark-submit. The SparkApplication controller automatically adds prefix "spark.hadoop." to Hadoop configuration properties. 
type: object hadoopConfigMap: @@ -23190,10 +23190,10 @@ metadata: name: spark-operator-controller namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: controller --- @@ -23204,10 +23204,10 @@ metadata: name: spark-operator-spark namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm --- # Source: spark-operator/templates/webhook/serviceaccount.yaml @@ -23217,10 +23217,10 @@ metadata: name: spark-operator-webhook namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook --- @@ -23231,10 +23231,10 @@ metadata: name: spark-operator-controller namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: controller rules: @@ -23266,10 +23266,10 @@ metadata: name: spark-operator-webhook namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook rules: @@ -23307,10 +23307,10 @@ metadata: name: spark-operator-controller namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: controller subjects: @@ -23329,10 +23329,10 @@ metadata: name: spark-operator-webhook namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook subjects: @@ -23351,10 +23351,10 @@ metadata: name: spark-operator-controller namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: controller rules: @@ -23463,10 +23463,10 @@ metadata: name: spark-operator-spark namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - 
app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm rules: - apiGroups: @@ -23493,10 +23493,10 @@ metadata: name: spark-operator-webhook namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook rules: @@ -23571,10 +23571,10 @@ metadata: name: spark-operator-controller namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: controller subjects: @@ -23593,10 +23593,10 @@ metadata: name: spark-operator-spark namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm subjects: - kind: ServiceAccount @@ -23614,10 +23614,10 @@ metadata: name: spark-operator-webhook namespace: default labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook subjects: @@ -23635,10 +23635,10 @@ kind: Service metadata: name: spark-operator-webhook-svc labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook spec: @@ -23657,10 +23657,10 @@ kind: Deployment metadata: name: spark-operator-controller labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: controller spec: @@ -23683,7 +23683,7 @@ spec: spec: containers: - name: spark-operator-controller - image: docker.io/kubeflow/spark-operator:2.0.1 + image: docker.io/kubeflow/spark-operator:2.0.2 imagePullPolicy: IfNotPresent args: - controller @@ -23700,6 +23700,9 @@ spec: - --leader-election=true - --leader-election-lock-name=spark-operator-controller-lock - --leader-election-lock-namespace=default + - --workqueue-ratelimiter-bucket-qps=50 + - --workqueue-ratelimiter-bucket-size=500 + - --workqueue-ratelimiter-max-delay=6h ports: - name: "metrics" containerPort: 8080 @@ -23721,10 +23724,10 @@ kind: Deployment metadata: name: spark-operator-webhook labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook spec: @@ -23743,7 +23746,7 @@ spec: spec: containers: - 
name: spark-operator-webhook - image: docker.io/kubeflow/spark-operator:2.0.1 + image: docker.io/kubeflow/spark-operator:2.0.2 imagePullPolicy: IfNotPresent args: - webhook @@ -23788,10 +23791,10 @@ kind: MutatingWebhookConfiguration metadata: name: spark-operator-webhook labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook webhooks: @@ -23871,10 +23874,10 @@ kind: ValidatingWebhookConfiguration metadata: name: spark-operator-webhook labels: - helm.sh/chart: spark-operator-2.0.1 + helm.sh/chart: spark-operator-2.0.2 app.kubernetes.io/name: spark-operator app.kubernetes.io/instance: spark-operator - app.kubernetes.io/version: "2.0.1" + app.kubernetes.io/version: "2.0.2" app.kubernetes.io/managed-by: Helm app.kubernetes.io/component: webhook webhooks: diff --git a/contrib/spark/sparkapplication_example.yaml b/contrib/spark/sparkapplication_example.yaml index 7a789a121..1ddaa87ba 100644 --- a/contrib/spark/sparkapplication_example.yaml +++ b/contrib/spark/sparkapplication_example.yaml @@ -2,7 +2,6 @@ apiVersion: sparkoperator.k8s.io/v1beta2 kind: SparkApplication metadata: name: spark-pi-python - namespace: default spec: type: Python pythonVersion: "3" @@ -21,3 +20,5 @@ spec: instances: 1 cores: 1 memory: 512m + annotations: + "sidecar.istio.io/inject": "false" diff --git a/contrib/spark/test.sh b/contrib/spark/test.sh index 7962962dc..d5397877c 100755 --- a/contrib/spark/test.sh +++ b/contrib/spark/test.sh @@ -28,7 +28,7 @@ done echo "Namespace $NAMESPACE has been created!" -kubectl label namespace $NAMESPACE istio-injection=enabled +kubectl label namespace $NAMESPACE istio-injection=enabled --overwrite kubectl get namespaces --selector=istio-injection=enabled @@ -36,20 +36,20 @@ kubectl get namespaces --selector=istio-injection=enabled kustomize build spark-operator/overlays/standalone | kubectl -n kubeflow apply --server-side -f - # Wait for the operator to be ready. -kubectl -n kubeflow wait --for=condition=available --timeout=600s deploy/spark-operator -kubectl -n kubeflow get pod -l app.kubernetes.io/component=spark-operator +kubectl -n kubeflow wait --for=condition=available --timeout=600s deploy/spark-operator-controller +kubectl -n kubeflow get pod -l app.kubernetes.io/name=spark-operator # Install Spark components kubectl -n $NAMESPACE apply -f sparkapplication_example.yaml # Wait for the Spark to be ready. sleep 5 -kubectl -n $NAMESPACE wait --for=condition=ready pod -l sparkoperator.k8s.io/sparkapplication=kubeflow-sparkapplication --timeout=900s -kubectl -n $NAMESPACE logs -l sparkoperator.k8s.io/sparkapplication=kubeflow-sparkapplication,sparkoperator.k8s.io/node-type=head +kubectl -n $NAMESPACE wait --for=condition=ready pod -l sparkoperator.k8s.io/sparkapplication=spark-pi-python --timeout=900s +kubectl -n $NAMESPACE logs -l sparkoperator.k8s.io/sparkapplication=spark-pi-python,sparkoperator.k8s.io/node-type=head # Forward the port of Spark UI sleep 5 -kubectl -n $NAMESPACE port-forward --address 0.0.0.0 svc/kubeflow-sparkapplication-head-svc 4040:4040 & +kubectl -n $NAMESPACE port-forward --address 0.0.0.0 svc/spark-pi-python-head-svc 4040:4040 & PID=$! echo "Forward the port 4040 of Spark head in the background process: $PID"
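
A quick post-apply sanity check (a sketch, not part of this diff; it assumes the standalone overlay was applied to the kubeflow namespace as test.sh does, with the example SparkApplication in $NAMESPACE):

# Confirm the controller now runs the 2.0.2 image and picked up the new workqueue rate-limiter flags.
kubectl -n kubeflow get deploy spark-operator-controller -o jsonpath='{.spec.template.spec.containers[0].image}{"\n"}'
kubectl -n kubeflow describe deploy spark-operator-controller | grep workqueue

# Check the status of the example SparkApplication submitted by test.sh.
kubectl -n $NAMESPACE get sparkapplication spark-pi-python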