demo.git: vnfs/DAaaS/deploy/training-core/hdfs-writer-source-code/hdfs-writer/src/main/java/CreateKafkaConsumer.java
import config.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.time.Duration;
import java.util.*;

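/**
 * Consumes messages from a configured Kafka topic and writes each record's
 * value to a file on HDFS via {@link HdfsWriter}.
 */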
public class CreateKafkaConsumer {

    private static final Logger log = LoggerFactory.getLogger(CreateKafkaConsumer.class);

    // Kafka settings read from the external configuration
    private final String BOOTSTRAP_SERVERS = (String) Configuration.getSettings().get("kafka").get("bootStrapServers");
    private final String GROUP_ID_CONFIG = (String) Configuration.getSettings().get("kafka").get("group_id");
    private final String KEY_DESERIALIZER = (String) Configuration.getSettings().get("kafka").get("key_deserialize_class");
    private final String VAL_DESERIALIZER = (String) Configuration.getSettings().get("kafka").get("value_deserialize_class");
    private final String KAFKA_TOPIC = (String) Configuration.getSettings().get("kafka").get("topic");

    // HDFS settings read from the external configuration
    private final String HDFS_URL = (String) Configuration.getSettings().get("hdfs").get("hdfsURL");
    private final String HDFS_REMOTE_FILE = (String) Configuration.getSettings().get("hdfs").get("hdfs_remote_file");

    private KafkaConsumer<String, String> kafkaConsumer;
    private Properties properties = new Properties();
    private HdfsWriter hdfsWriter;
    private FileSystem hdfsFileSystem;

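    /**
     * Builds the consumer from the configured properties, subscribes it to the
     * configured topic, and opens the HDFS file system handle used by the poll loop.
     */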
    public CreateKafkaConsumer() throws IOException {
        setKafkaProperties();
        kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Collections.singletonList(KAFKA_TOPIC));
        hdfsWriter = new HdfsWriter();
        hdfsFileSystem = hdfsWriter.createHdfsFileSystem(HDFS_URL);
        log.info(":::Created kafkaConsumer:::");
    }

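    /** Populates the consumer {@link Properties} from the loaded configuration. */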
    private void setKafkaProperties() {
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID_CONFIG);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, KEY_DESERIALIZER);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, VAL_DESERIALIZER);
        log.info(":::Set kafka properties:::");
    }

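    /**
     * Polls the subscribed topic indefinitely and writes each record value to
     * the configured HDFS file; the consumer is closed when the loop exits.
     */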
    public void processKafkaMessage() throws IOException {
        try {
            while (true) {
                ConsumerRecords<String, String> recordsPerPartition = kafkaConsumer.poll(Duration.ofMillis(100000));
                if (recordsPerPartition.isEmpty())
                    log.info(":::recordsPerPartition is empty:::");
                else
                    log.info(":::size of recordsPerPartition: {} :::", recordsPerPartition.count());

                for (ConsumerRecord<String, String> record : recordsPerPartition) {
                    log.info("Topic: {}", record.topic());
                    log.info("partition: {}", record.partition());
                    log.info("ReceivedKey: {} ReceivedValue: {}", record.key(), record.value());
                    // try-with-resources closes the HDFS stream even if the write fails
                    try (FSDataOutputStream fsDataOutputStream = hdfsWriter.invokeHdfsWriter(hdfsFileSystem, HDFS_REMOTE_FILE)) {
                        hdfsWriter.writeMessageToHdfs(fsDataOutputStream, record.value());
                    }
                }
            }
        } finally {
            log.info(":::Closing kafkaConsumer:::");
            kafkaConsumer.close();
        }
    }
}