# execute_spark_job.yaml
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
  name: pyspark-job-gcp1
  namespace: processing
spec:
  type: Python
  mode: cluster
  # Custom PySpark 3.1.1 image bundled with the GCP connectors
  image: arthexbr77/pyspark_gcp_connectors:3.1.1
  imagePullPolicy: Always
  # The local:// scheme points at a file baked into the image
  mainApplicationFile: 'local:///app/spark_job.py'
  sparkVersion: '3.1.1'
  restartPolicy:
    type: OnFailure
    onFailureRetries: 3
    onFailureRetryInterval: 10
    onSubmissionFailureRetries: 5
    onSubmissionFailureRetryInterval: 20
  # GCS connector settings: authenticate with the service-account key
  # mounted from the 'gcs-key' secret referenced below
  hadoopConf:
    'fs.gs.project.id': 'gcp-f1-pipeline'
    'fs.gs.system.bucket': 'dp-landing-zone'
    'google.cloud.auth.service.account.enable': 'true'
    'google.cloud.auth.service.account.json.keyfile': '/mnt/secrets/key.json'
  driver:
    coreRequest: 250m
    coreLimit: '1200m'
    memory: '512m'
    secrets:
      - name: 'gcs-key'
        path: '/mnt/secrets'
        secretType: GCPServiceAccount
    serviceAccount: default
    labels:
      version: 3.0.0
    envVars:
      GCS_PROJECT_ID: gcp-f1-pipeline
  executor:
    coreRequest: 250m
    instances: 2
    memory: '512m'
    secrets:
      - name: 'gcs-key'
        path: '/mnt/secrets'
        secretType: GCPServiceAccount
    labels:
      version: 3.0.0
    envVars:
      GCS_PROJECT_ID: gcp-f1-pipeline
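---
# The driver and executor above mount a secret named 'gcs-key' at /mnt/secrets,
# where the Hadoop config expects key.json. Below is a minimal sketch of that
# secret; it is an assumption for illustration, not part of the original repo,
# and the key content is a placeholder. The same secret can be created from a
# local key file with:
#   kubectl create secret generic gcs-key -n processing \
#     --from-file=key.json=/path/to/service-account.json
# and the job itself is submitted with:
#   kubectl apply -f execute_spark_job.yaml
apiVersion: v1
kind: Secret
metadata:
  name: gcs-key
  namespace: processing
type: Opaque
stringData:
  # Placeholder: replace with the real GCP service-account key JSON,
  # or create the secret from a file as shown above
  key.json: |
    { "comment": "placeholder - paste the service-account key JSON here" }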