Sonar major issues
[dmaap/messagerouter/msgrtr.git] src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java
/*******************************************************************************
 *  ============LICENSE_START=======================================================
 *  org.onap.dmaap
 *  ================================================================================
 *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 *  ================================================================================
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *  ============LICENSE_END=========================================================
 *
 *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *
 *******************************************************************************/
package com.att.dmf.mr.backends.kafka;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.json.JSONException;
import org.springframework.beans.factory.annotation.Qualifier;

import com.att.dmf.mr.backends.Publisher;
import com.att.dmf.mr.constants.CambriaConstants;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.drumlin.till.nv.rrNvReadable;

/**
 * Sends raw JSON objects into Kafka.
 *
 * Could improve space: BSON rather than JSON?
 *
 * @author peter
 */
public class KafkaPublisher implements Publisher {
	/**
	 * Constructor: builds the Kafka producer from the message router properties,
	 * falling back to defaults when a setting is absent.
	 *
	 * @param settings
	 * @throws rrNvReadable.missingReqdSetting
	 */
	public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
		final Properties props = new Properties();

		// Resolve the broker list from the message router properties, defaulting to a local broker.
		String kafkaConnUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
				"kafka.metadata.broker.list");
		if (null == kafkaConnUrl) {
			kafkaConnUrl = "localhost:9092";
		}

		transferSetting(props, "bootstrap.servers", kafkaConnUrl);
		transferSetting(props, "request.required.acks", "1");
		transferSetting(props, "message.send.max.retries", "5");
		transferSetting(props, "retry.backoff.ms", "150");

		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		fProducer = new KafkaProducer<>(props);
	}

	/**
	 * Send a message with a given topic and key.
	 *
	 * @param msg
	 * @throws IOException
	 * @throws JSONException
	 */
	@Override
	public void sendMessage(String topic, message msg) throws IOException {
		final List<message> msgs = new LinkedList<>();
		msgs.add(msg);
		sendMessages(topic, msgs);
	}

	/**
	 * Publishes a batch of messages.
	 * This method was commented out during the Kafka 0.8 to 0.11 upgrade.
	 *
	 * @param topic
	 * @param kms
	 * @throws IOException
	 *
	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
		try {
			fProducer.send(kms);
		} catch (FailedToSendMessageException excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new FailedToSendMessageException(excp.getMessage(), excp);
		}
	} */

	/*
	 * Kafka 0.11 interface
	 * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
	 */
	public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
		try {
			for (ProducerRecord<String, String> km : kms) {
				fProducer.send(km);
			}
		} catch (Exception excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new IOException(excp.getMessage(), excp);
		}
	}

	/**
	 * Send a set of messages. Each must have a "key" string value.
	 * This implementation was commented out during the Kafka 0.8 to 0.11 upgrade.
	 *
	 * @param topic
	 * @param msgs
	 * @throws FailedToSendMessageException
	 * @throws JSONException
	 *
	@Override
	public void sendMessages(String topic, List<? extends message> msgs)
			throws IOException, FailedToSendMessageException {
		log.info("sending " + msgs.size() + " events to [" + topic + "]");

		final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
		for (message o : msgs) {
			final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
			kms.add(data);
		}
		try {
			fProducer.send(kms);
		} catch (FailedToSendMessageException excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new FailedToSendMessageException(excp.getMessage(), excp);
		}
	} */
	@Override
	public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
		log.info("sending " + msgs.size() + " events to [" + topic + "]");
		try {
			for (message o : msgs) {
				// One record per message: topic, partition key, JSON payload.
				final ProducerRecord<String, String> data = new ProducerRecord<>(topic, o.getKey(), o.toString());
				fProducer.send(data);
			}
		} catch (Exception excp) {
			// Propagate the failure rather than silently swallowing it.
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new IOException(excp.getMessage(), excp);
		}
	}

	private Producer<String, String> fProducer;

	/**
	 * Copies a "kafka."-prefixed setting from the message router properties into the
	 * producer properties, falling back to the supplied default when it is not set.
	 *
	 * @param props
	 * @param key
	 * @param defVal
	 */
	private void transferSetting(Properties props, String key, String defVal) {
		String kafkaProp = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kafka." + key);
		if (null == kafkaProp) {
			kafkaProp = defVal;
		}
		props.put(key, kafkaProp);
	}

	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);

	@Override
	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
		// Delegate to the Kafka 0.11 send path so callers of this interface method still publish.
		sendMessagesNew(topic, msgs);
	}

}
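
For context, below is a minimal standalone sketch of the producer configuration this class assembles and the record shape sendMessagesNew builds, written against the plain Kafka producer API. The broker address, topic name, key, and payload are illustrative assumptions, not values read from the message router properties.

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class KafkaPublisherConfigSketch {

	public static void main(String[] args) throws Exception {
		final Properties props = new Properties();
		// Defaults mirroring what KafkaPublisher falls back to when no "kafka.*" properties are configured.
		props.put("bootstrap.servers", "localhost:9092"); // assumed broker address
		props.put("request.required.acks", "1");
		props.put("message.send.max.retries", "5");
		props.put("retry.backoff.ms", "150");
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		// Same record shape KafkaPublisher.sendMessagesNew sends: topic, partition key, JSON payload.
		try (Producer<String, String> producer = new KafkaProducer<>(props)) {
			producer.send(new ProducerRecord<>("example.topic", "example-key", "{\"event\":\"example\"}"));
		}
	}
}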