---
# Default values for sample-spark-app.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
#===========================KUBERNETES POD RELATED CONFIGs========================
image: "spark-tf-keras-horo:latest"
volumesName: test-volume
hostpathType: Directory
#============================SPARK APP RELATED CONFIGs=============================
nameOfTheSparkApp: spark-apache-logs2
# Python or Scala supported.
programmingLanguageType: Scala
modeOfSparkApp: cluster
mainClassOfTheSparkApp: ApacheLogAnalysis
# Can be an HTTP path, S3 path, or MinIO path.
mainApplicationFileOfTheSparkApp: https://github.com/mohanraj1311/ApacheLogAnalysisJar/raw/master/analysisofapachelogs_2.11-0.1.jar
argumentsOfTheSparkProgram:
  - hdfs://hdfs-1-namenode-1.hdfs-1-namenode.hdfs1.svc.cluster.local:8020/data/apache-logs
#============================SPARK DRIVER RELATED CONFIGs=========================
driverVolumeMountsName: test-volume
driverVolumeMountPath: /tmp
#============================SPARK EXECUTOR RELATED CONFIGs=======================
executorVolumeMountsName: test-volume
executorVolumeMountPath: /tmp
#===========================HADOOP RELATED CONFIGs===============================
# ConfigMap holding the HDFS configuration.
hadoopConfigMap: hdfs-1-config
###################################################################################