---
# Default values for sample-spark-app.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

#=========================== KUBERNETES POD RELATED CONFIGS ========================
# Container image (repository:tag). Quoted: the value contains a colon, which is
# safe as a plain scalar but fragile once the value is templated or edited.
image: "spark-tf-keras-horo:latest"
volumesName: test-volume
# hostPath volume type. "Directory" requires the path to already exist on the
# node — NOTE(review): presumably intentional; use "DirectoryOrCreate" if not.
hostpathType: Directory

#============================ SPARK APP RELATED CONFIGS ============================
nameOfTheSparkApp: spark-apache-logs2
# Python or Scala supported.
programmingLanguageType: Scala
modeOfSparkApp: cluster
mainClassOfTheSparkApp: ApacheLogAnalysis
# Can be an http path, s3 path, or minio path.
mainApplicationFileOfTheSparkApp: "https://github.com/mohanraj1311/ApacheLogAnalysisJar/raw/master/analysisofapachelogs_2.11-0.1.jar"
argumentsOfTheSparkProgram:
  - "hdfs://hdfs-1-namenode-1.hdfs-1-namenode.hdfs1.svc.cluster.local:8020/data/apache-logs"

#============================ SPARK DRIVER RELATED CONFIGS =========================
driverVolumeMountsName: test-volume
driverVolumeMountPath: /tmp

#============================ SPARK EXECUTOR RELATED CONFIGS =======================
executorVolumeMountsName: test-volume
executorVolumeMountPath: /tmp

#=========================== HADOOP RELATED CONFIGS ================================
# Name of the ConfigMap holding the HDFS client configuration.
hadoopConfigMap: hdfs-1-config

###################################################################################