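# staroid.yaml: Staroid project descriptor for the open-datastudio
# spark-thriftserver package. It tells Staroid how to build the container
# image (via skaffold), which service to expose, which projects must be
# deployed alongside it, and which parameters users can tune at launch time.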
apiVersion: beta/v1

build:
  skaffold:
    file: skaffold.yaml
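
# Expose the Spark UI service to users. Port 4040 is Spark's default
# application UI port.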
ingress:
  - serviceName: spark-ui
    port: 4040
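
# The Hive metastore project is a hard (REQUIRED) dependency: Staroid
# deploys open-datastudio/hive-metastore together with this project so the
# Thrift server has a metastore to talk to.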
deploy:
  dependencies:
    - project: open-datastudio/hive-metastore
      level: REQUIRED

implements: open-datastudio/spark-thriftserver
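
# Launch-time parameters. paramGroups only controls how the parameters are
# grouped (collapsed by default) in the launch UI; the params entries below
# define the actual values and the resource fields they patch.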
paramGroups:
  - name: Spark configurations
    collapsed: true

params:
  - name: spark-image
    type: STRING
    description: Spark docker image. https://hub.docker.com/repository/docker/opendatastudio/spark
    defaultValue: opendatastudio/spark:v3.1.0-snapshot-20200618-01
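    # The paths below are the resource fields Staroid rewrites with the
    # chosen value: the container image of both Deployments, plus a
    # ConfigMap key, so every component references the same Spark image.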
    paths:
      - Deployment:spark-thriftserver:spec.template.spec.containers[0].image
      - Deployment:beeline:spec.template.spec.containers[0].image
      - ConfigMap:spark-conf:data.spark-container-image
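
  # Default Spark properties, written verbatim into the spark-defaults.conf
  # key of the spark-conf ConfigMap. Users can override this block at launch.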
  - name: spark-defaults.conf
    type: STRING
    description: Spark properties
    defaultValue: |
      spark.executor.cores 4
      spark.executor.memory 8g
      spark.dynamicAllocation.enabled true
      spark.dynamicAllocation.minExecutors 1
      spark.dynamicAllocation.maxExecutors 10
      spark.dynamicAllocation.initialExecutors 1
      spark.dynamicAllocation.shuffleTracking.enabled true
      spark.dynamicAllocation.executorIdleTimeout 600s
      spark.dynamicAllocation.schedulerBacklogTimeout 60s
      # 'dedicated' or 'sandboxed'. 'dedicated' runs executors on a dedicated kubernetes node.
      spark.kubernetes.executor.label.pod.staroid.com/isolation dedicated
      # 'standard-2', 'standard-4', and 'standard-8' are available. Note that spark.executor.cores|memory need to be changed accordingly.
      spark.kubernetes.executor.label.pod.staroid.com/instance-type standard-4
      # 'true' or 'false'. 'true' runs executors on spot instances.
      spark.kubernetes.executor.label.pod.staroid.com/spot true
      spark.scheduler.mode FAIR
      spark.serializer org.apache.spark.serializer.KryoSerializer
      spark.sql.adaptive.enabled true
      spark.sql.adaptive.coalescePartitions.enabled true
    paths:
      - ConfigMap:spark-conf:data["spark-defaults.conf"]
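
# Once deployed, the Thrift server speaks the standard HiveServer2 protocol,
# so any JDBC client can connect. A minimal sketch, assuming the service is
# reachable as 'spark-thriftserver' on HiveServer2's default port 10000
# (both the hostname and port are assumptions, not defined in this file):
#
#   beeline -u jdbc:hive2://spark-thriftserver:10000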