Last active: November 14, 2019 15:23
Revisions
rafaelfelix revised this gist
Nov 14, 2019 · 1 changed file with 2 additions and 2 deletions.
@@ -5,11 +5,11 @@ cat > clustermode-podspec.yaml << EOF
 apiVersion: v1
 kind: Pod
 metadata:
-  name: spark-sample-client-mode
+  name: spark-submit-example
 spec:
   serviceAccountName: spark
   containers:
-  - name: spark-submit-client-mode
+  - name: spark-submit-example
     args:
     - /opt/spark/bin/spark-submit
     - --master
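The revision only renames the Pod and its container to spark-submit-example. Because `kubectl apply` matches objects by name, re-applying the renamed spec creates a second pod rather than updating the one from the first revision, so the old pod may need to be removed by hand. A minimal cleanup sketch, assuming the old pod still exists in the default namespace:

```bash
# Remove the pod created by the first revision of this gist, if it is still around
kubectl delete pod spark-sample-client-mode --ignore-not-found

# Re-create it under the new name from the revised spec
kubectl apply -f clustermode-podspec.yaml
```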
rafaelfelix created this gist
Nov 14, 2019
#!/bin/bash

# Address of the in-cluster Docker registry: the cluster IP of the
# registry-docker-registry service, on port 5000.
IMAGE_REPO=$(kubectl get svc registry-docker-registry -o=jsonpath='{.spec.clusterIP}'):5000

# Write a pod spec whose container runs spark-submit against the in-cluster
# API server. The \$(...) escapes keep the Kubernetes env-var references
# literal in the YAML so they are expanded inside the pod, not by this shell.
cat > clustermode-podspec.yaml << EOF
apiVersion: v1
kind: Pod
metadata:
  name: spark-sample-client-mode
spec:
  serviceAccountName: spark
  containers:
  - name: spark-submit-client-mode
    args:
    - /opt/spark/bin/spark-submit
    - --master
    - k8s://https://\$(KUBERNETES_PORT_443_TCP_ADDR):\$(KUBERNETES_PORT_443_TCP_PORT)
    - --deploy-mode
    - cluster
    - --conf
    - spark.kubernetes.container.image=$IMAGE_REPO/spark
    - --conf
    - spark.kubernetes.authenticate.driver.serviceAccountName=spark
    - --class
    - org.apache.spark.examples.SparkPi
    - local:///opt/spark/examples/jars/spark-examples_2.11-2.4.4.jar
    env:
    - name: SPARK_HOME
      value: /opt/spark
    resources: {}
    image: $IMAGE_REPO/spark:latest
    imagePullPolicy: Always
EOF

# This will start a pod that runs spark-submit.
# See https://spark.apache.org/docs/latest/running-on-kubernetes.html#submitting-applications-to-kubernetes
# for the differences between running in client and cluster mode.
kubectl apply -f clustermode-podspec.yaml
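The pod spec assumes a `spark` service account with enough RBAC permissions for the driver to create executor pods. A minimal sketch of that prerequisite, plus how to follow the job afterwards, assuming the default namespace; the `spark-role` binding name and `edit` cluster role follow the suggestion in the Spark on Kubernetes docs and are illustrative choices, not part of the gist:

```bash
# One-time setup: service account used by the submitter pod and the driver
kubectl create serviceaccount spark
kubectl create clusterrolebinding spark-role \
  --clusterrole=edit \
  --serviceaccount=default:spark \
  --namespace=default

# Follow the submitter pod's output (the spark-submit logs)
kubectl logs -f spark-sample-client-mode

# In cluster mode, spark-submit creates a separate driver pod,
# which Spark labels with spark-role=driver
kubectl get pods -l spark-role=driver
kubectl logs -l spark-role=driver --tail=50
```

Once the driver pod completes, the result of the SparkPi example shows up in its log rather than in the submitter pod's output.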