add testcases
[dmaap/messagerouter/msgrtr.git] src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java
/*******************************************************************************
 *  ============LICENSE_START=======================================================
 *  org.onap.dmaap
 *  ================================================================================
 *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 *  ================================================================================
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *  ============LICENSE_END=========================================================
 *
 *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *
 *******************************************************************************/
package org.onap.dmaap.dmf.mr.backends.kafka;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.json.JSONException;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.util.StringUtils;

import org.onap.dmaap.dmf.mr.backends.Publisher;
import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
import org.onap.dmaap.dmf.mr.utils.Utils;

import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.drumlin.till.nv.rrNvReadable;

/**
 * Sends raw JSON objects into Kafka.
 *
 * Could improve space: BSON rather than JSON?
 *
 * @author peter
 */
public class KafkaPublisher implements Publisher {
	/**
	 * Constructor: builds the Kafka producer configuration from the message
	 * router properties (falling back to defaults where none are set) and
	 * creates the shared KafkaProducer instance.
	 *
	 * @param settings injected property reader; broker settings themselves are
	 *                 resolved via AJSCPropertiesMap
	 * @throws rrNvReadable.missingReqdSetting
	 */
	public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {

		final Properties props = new Properties();
		String kafkaConnUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kafka.metadata.broker.list");
		if (StringUtils.isEmpty(kafkaConnUrl)) {
			kafkaConnUrl = "localhost:9092";
		}

		if (Utils.isCadiEnabled()) {
			transferSetting(props, "sasl.jaas.config",
					"org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='" + Utils.getKafkaproperty() + "';");
			transferSetting(props, "security.protocol", "SASL_PLAINTEXT");
			transferSetting(props, "sasl.mechanism", "PLAIN");
		}
		transferSetting(props, "bootstrap.servers", kafkaConnUrl);

		transferSetting(props, "request.required.acks", "1");
		transferSetting(props, "message.send.max.retries", "5");
		transferSetting(props, "retry.backoff.ms", "150");

		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		fProducer = new KafkaProducer<>(props);
	}
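
	/*
	 * Usage sketch (illustrative only; "settings" and buildBatch() are hypothetical,
	 * and message refers to the Publisher message type used throughout this class):
	 *
	 *     KafkaPublisher pub = new KafkaPublisher(settings);
	 *     List<message> batch = buildBatch();        // application-specific
	 *     pub.sendMessagesNew("SOME_TOPIC", batch);  // publishes via the 0.11 producer API
	 */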

	/**
	 * Send a single message with a given topic and key. Delegates to
	 * {@link #sendMessages(String, List)}, which is currently a no-op stub;
	 * use {@link #sendMessagesNew(String, List)} to actually publish.
	 *
	 * @param topic
	 * @param msg
	 * @throws IOException
	 */
	@Override
	public void sendMessage(String topic, message msg) throws IOException {
		final List<message> msgs = new LinkedList<>();
		msgs.add(msg);
		sendMessages(topic, msgs);
	}

	/**
	 * Batch publish method, retained for reference only: it was commented out
	 * during the Kafka 0.8 to 0.11 client upgrade.
	 *
	 * @param topic
	 * @param kms
	 * @throws IOException
	 *
	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
		try {
			fProducer.send(kms);

		} catch (FailedToSendMessageException excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new FailedToSendMessageException(excp.getMessage(), excp);
		}

	} */

	/*
	 * Kafka 0.11 interface: publish a batch of pre-built producer records.
	 * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
	 */
	public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
		try {
			for (ProducerRecord<String, String> km : kms) {
				fProducer.send(km);
			}
		} catch (Exception excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new IOException(excp.getMessage(), excp);
		}
	}
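
	// Note: KafkaProducer.send(...) is asynchronous and returns a Future<RecordMetadata>;
	// broker-side failures may surface only through that Future (or a callback) rather
	// than as an exception at call time. A hedged sketch of how a caller could force
	// per-record confirmation, if that were ever needed (not the current behaviour):
	//
	//     Future<RecordMetadata> ack = fProducer.send(record);
	//     RecordMetadata meta = ack.get();   // blocks; throws if the send failed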

	/**
	 * Send a set of messages. Each must have a "key" string value.
	 * Retained for reference only; commented out during the 0.8 to 0.11 upgrade.
	 *
	 * @param topic
	 * @param msgs
	 * @throws FailedToSendMessageException
	 * @throws JSONException
	 *
	@Override
	public void sendMessages(String topic, List<? extends message> msgs)
			throws IOException, FailedToSendMessageException {
		log.info("sending " + msgs.size() + " events to [" + topic + "]");

		final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
		for (message o : msgs) {
			final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
			kms.add(data);
		}
		try {
			fProducer.send(kms);

		} catch (FailedToSendMessageException excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new FailedToSendMessageException(excp.getMessage(), excp);
		}
	} */

	@Override
	public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
		log.info("sending " + msgs.size() + " events to [" + topic + "]");
		try {
			for (message o : msgs) {
				final ProducerRecord<String, String> data = new ProducerRecord<>(topic, o.getKey(), o.toString());
				fProducer.send(data);
			}
		} catch (Exception excp) {
			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
			throw new IOException(excp.getMessage(), excp);
		}
	}
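
	// Note: each record is keyed with message.getKey(), so with Kafka's default
	// partitioner all records sharing a non-null key land on the same partition,
	// preserving per-key ordering.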

	private Producer<String, String> fProducer;

	/**
	 * Copies a single producer setting into the given Properties, preferring
	 * the "kafka." + key value from the message router property file and
	 * falling back to the supplied default.
	 *
	 * @param props  producer properties being assembled
	 * @param key    Kafka producer property name (without the "kafka." prefix)
	 * @param defVal default value used when no override is configured
	 */
	private void transferSetting(Properties props, String key, String defVal) {
		String kafkaProp = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kafka." + key);
		if (StringUtils.isEmpty(kafkaProp)) {
			kafkaProp = defVal;
		}
		props.put(key, kafkaProp);
	}
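
	// Example (hedged): with the defaults wired up in the constructor, adding a line
	// such as
	//     kafka.retry.backoff.ms=500
	// to the property file referenced by CambriaConstants.msgRtr_prop would be picked
	// up here and override the hard-coded default of "150".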

	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);

	@Override
	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
		// No-op stub retained to satisfy the Publisher interface. Note that
		// sendMessage(String, message) above delegates here, so single-message
		// sends currently publish nothing; use sendMessagesNew(...) instead.
	}

}
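
/*
 * Test sketch for the "add testcases" change (hedged: assumes JUnit 4 and Mockito on the
 * test classpath, and that this would live in a separate KafkaPublisherTest file; the
 * reflection step is needed only because fProducer is private):
 *
 *     public class KafkaPublisherTest {
 *
 *         @Test
 *         public void sendMessagesNewForwardsEachMessageToTheProducer() throws Exception {
 *             KafkaPublisher publisher = new KafkaPublisher(null);
 *
 *             // Swap the real KafkaProducer for a mock so no broker is required.
 *             Producer<String, String> producer = Mockito.mock(Producer.class);
 *             Field f = KafkaPublisher.class.getDeclaredField("fProducer");
 *             f.setAccessible(true);
 *             f.set(publisher, producer);
 *
 *             message msg = Mockito.mock(message.class);
 *             Mockito.when(msg.getKey()).thenReturn("key");
 *             publisher.sendMessagesNew("testTopic", Collections.singletonList(msg));
 *
 *             Mockito.verify(producer).send(Mockito.any(ProducerRecord.class));
 *         }
 *     }
 */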