# demo.git: vnfs/DAaaS/sample-apps/training/sample-spark-app/values.yaml
# Default values for sample-spark-app.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.


#===========================KUBERNETES POD RELATED CONFIGs========================
image: spark-tf-keras-horo:latest
imagePullPolicy: Never
restartPolicy: Never
volumesName: test-volume
hostpath: /tmp
hostpathType: Directory
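
# The values above are consumed by the chart's pod template; a minimal sketch
# of the expected rendering is shown below (illustrative only; the actual
# template lives under templates/ and may differ):
#
#   volumes:
#     - name: {{ .Values.volumesName }}        # test-volume
#       hostPath:
#         path: {{ .Values.hostpath }}         # /tmp
#         type: {{ .Values.hostpathType }}     # Directory
#
# Note: imagePullPolicy "Never" assumes spark-tf-keras-horo:latest is already
# present on every node (e.g. built or side-loaded locally); no registry pull
# will be attempted.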



#============================SPARK APP RELATED CONFIGs=============================

nameOfTheSparkApp: spark-apache-logs2
# Python or Scala supported.
programmingLanguageType: Scala
modeOfSparkApp: cluster
mainClassOfTheSparkApp: ApacheLogAnalysis
# Can be an HTTP(S) path, an S3 path, or a MinIO path.
mainApplicationFileOfTheSparkApp: https://github.com/mohanraj1311/ApacheLogAnalysisJar/raw/master/analysisofapachelogs_2.11-0.1.jar
argumentsOfTheSparkProgram:
    - hdfs://hdfs-1-namenode-1.hdfs-1-namenode.hdfs1.svc.cluster.local:8020/data/apache-logs
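
# A sketch of the SparkApplication spec these values are expected to feed.
# Field names follow the spark-on-k8s-operator CRD; the chart's actual
# template under templates/ is authoritative:
#
#   spec:
#     type: {{ .Values.programmingLanguageType }}          # Scala
#     mode: {{ .Values.modeOfSparkApp }}                   # cluster
#     mainClass: {{ .Values.mainClassOfTheSparkApp }}
#     mainApplicationFile: {{ .Values.mainApplicationFileOfTheSparkApp }}
#     arguments:
#     {{- range .Values.argumentsOfTheSparkProgram }}
#       - {{ . }}
#     {{- end }}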



#============================SPARK DRIVER RELATED CONFIGs=========================
driverCores: 0.1
driverCoreLimit: 200m
driverMemory: 1024m
driverVolumeMountsName: test-volume
driverVolumeMountPath: /tmp
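
# Units, for reference: driverCores is a fractional CPU request (0.1 CPU),
# driverCoreLimit uses Kubernetes millicpu notation (200m = 0.2 CPU), and
# driverMemory uses Spark's JVM-style suffixes, so "1024m" means 1024 MiB,
# not Kubernetes millibytes.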



#============================SPARK EXECUTOR RELATED CONFIGs=======================
executorCores: 1
executorInstances: 1
executorMemory: 512m
executorVolumeMountsName: test-volume
executorVolumeMountPath: /tmp
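
# As with the driver, executorMemory "512m" is 512 MiB of JVM heap. Spark on
# Kubernetes adds a non-heap overhead on top of this when sizing the executor
# pod (by default the larger of 384 MiB or 10% of executor memory), so each
# executor pod here requests roughly 896 MiB in total.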



#===========================HADOOP RELATED CONFIGs===============================
# Name of the ConfigMap holding the HDFS client configuration.
hadoopConfigMap: hdfs-1-config
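
# How this is wired (an assumption based on spark-on-k8s-operator
# conventions; verify against the chart's template): the value is expected
# to be passed as spec.hadoopConfigMap, which mounts the ConfigMap into the
# driver and executor pods and points HADOOP_CONF_DIR at it, so the hdfs://
# path in argumentsOfTheSparkProgram resolves against the right NameNode.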


###################################################################################
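
# Usage (illustrative; the release name and chart path are assumptions, and
# Helm 3 syntax is shown -- under Helm 2, pass the release via --name):
#   helm install sample-spark-app ./sample-spark-app
# Individual values can be overridden at install time, e.g.:
#   helm install sample-spark-app ./sample-spark-app --set executorInstances=3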