/*******************************************************************************
 *  ============LICENSE_START=======================================================
 *  org.onap.dmaap
 *  ================================================================================
 *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 *  ================================================================================
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *  ============LICENSE_END=========================================================
 *  
 *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *  
 *******************************************************************************/
package com.att.dmf.mr.backends.kafka;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Qualifier;

import com.att.dmf.mr.backends.Publisher;
import com.att.dmf.mr.constants.CambriaConstants;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.drumlin.till.nv.rrNvReadable;

/**
 * Sends raw JSON objects into Kafka.
 * 
 * Could improve space: BSON rather than JSON?
 * 
 * @author peter
 */
public class KafkaPublisher implements Publisher {
    /**
     * Constructor: builds the Kafka producer configuration from the message
     * router property file (falling back to the defaults below) and creates
     * the producer.
     * 
     * @param settings
     * @throws rrNvReadable.missingReqdSetting
     */
    public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
        final Properties props = new Properties();

        String kafkaConnUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kafka.metadata.broker.list");
        if (null == kafkaConnUrl) {
            kafkaConnUrl = "localhost:9092";
        }

        transferSetting(props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
        transferSetting(props, "security.protocol", "SASL_PLAINTEXT");
        transferSetting(props, "sasl.mechanism", "PLAIN");
        transferSetting(props, "bootstrap.servers", kafkaConnUrl);

        transferSetting(props, "request.required.acks", "1");
        transferSetting(props, "message.send.max.retries", "5");
        transferSetting(props, "retry.backoff.ms", "150");

        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        fProducer = new KafkaProducer<>(props);
    }
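
    /*
     * A sketch of the corresponding overrides in the message router property file
     * (the one referenced by CambriaConstants.msgRtr_prop): each setting routed
     * through transferSetting() is looked up with a "kafka." prefix, and missing
     * keys fall back to the hard-coded defaults above. The broker hosts and values
     * below are placeholders, not shipped defaults:
     *
     *   kafka.metadata.broker.list=broker1:9092,broker2:9092
     *   kafka.request.required.acks=1
     *   kafka.message.send.max.retries=5
     *   kafka.retry.backoff.ms=150
     *   kafka.security.protocol=SASL_PLAINTEXT
     *   kafka.sasl.mechanism=PLAIN
     */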

    /**
     * Sends a single message with the given topic and key by wrapping it in a
     * one-element list and delegating to {@link #sendMessages(String, List)}.
     * 
     * @param topic
     * @param msg
     * @throws IOException
     */
    @Override
    public void sendMessage(String topic, message msg) throws IOException {
        final List<message> msgs = new LinkedList<message>();
        msgs.add(msg);
        sendMessages(topic, msgs);
    }
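
    /*
     * A minimal usage sketch, under the assumption that "message" is the nested
     * Publisher.message interface and that, as used in this class, getKey() supplies
     * the record key and toString() the JSON payload (any other methods the interface
     * declares are omitted). sendMessagesNew is used here because it is the active
     * Kafka 0.11 path; sendMessage above routes to the sendMessages stub at the
     * bottom of this class.
     *
     *   Publisher.message m = new Publisher.message() {
     *       public String getKey() { return "partition-key"; }
     *       public String toString() { return "{\"event\":\"sample\"}"; }
     *       // ...remaining interface methods...
     *   };
     *   kafkaPublisher.sendMessagesNew("MY_TOPIC", java.util.Collections.singletonList(m));
     */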

    /**
     * Publishes a batch of keyed messages. This method was commented out as part
     * of the Kafka 0.8 to 0.11 client upgrade.
     * 
     * @param topic
     * @param kms
     * @throws IOException
     *
    public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
        try {
            fProducer.send(kms);
        } catch (FailedToSendMessageException excp) {
            log.error("Failed to send message(s) to topic [" + topic + "].", excp);
            throw new FailedToSendMessageException(excp.getMessage(), excp);
        }
    } */

    /**
     * Kafka 0.11 interface: publishes a batch of pre-built producer records.
     * 
     * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
     */
    public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
        try {
            for (ProducerRecord<String, String> km : kms) {
                fProducer.send(km);
            }
        } catch (Exception excp) {
            log.error("Failed to send message(s) to topic [" + topic + "].", excp);
            throw new IOException(excp.getMessage(), excp);
        }
    }
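
    /*
     * A brief caller-side sketch for sendBatchMessageNew; the topic name, keys and
     * payloads are illustrative, and "publisher" is assumed to be a KafkaPublisher
     * instance:
     *
     *   ArrayList<ProducerRecord<String, String>> batch = new ArrayList<>();
     *   batch.add(new ProducerRecord<>("MY_TOPIC", "key-1", "{\"event\":1}"));
     *   batch.add(new ProducerRecord<>("MY_TOPIC", "key-2", "{\"event\":2}"));
     *   publisher.sendBatchMessageNew("MY_TOPIC", batch);
     */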

    /**
     * Send a set of messages. Each must have a "key" string value. This 0.8-era
     * implementation was commented out during the Kafka 0.8 to 0.11 client upgrade;
     * see sendMessagesNew(String, List) for the current path.
     * 
     * @param topic
     * @param msgs
     * @throws FailedToSendMessageException
     * @throws JSONException
     *
    @Override
    public void sendMessages(String topic, List<? extends message> msgs)
            throws IOException, FailedToSendMessageException {
        log.info("sending " + msgs.size() + " events to [" + topic + "]");

        final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
        for (message o : msgs) {
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
            kms.add(data);
        }
        try {
            fProducer.send(kms);
        } catch (FailedToSendMessageException excp) {
            log.error("Failed to send message(s) to topic [" + topic + "].", excp);
            throw new FailedToSendMessageException(excp.getMessage(), excp);
        }
    } */

    /**
     * Kafka 0.11 interface: sends each message individually to the given topic,
     * using the message key as the record key.
     * 
     * @param topic
     * @param msgs
     * @throws IOException
     */
    @Override
    public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
        log.info("sending " + msgs.size() + " events to [" + topic + "]");
        try {
            for (message o : msgs) {
                final ProducerRecord<String, String> data = new ProducerRecord<>(topic, o.getKey(), o.toString());
                fProducer.send(data);
            }
        } catch (Exception excp) {
            log.error("Failed to send message(s) to topic [" + topic + "].", excp);
            throw new IOException(excp.getMessage(), excp);
        }
    }
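
    /*
     * Note: KafkaProducer.send() is asynchronous and returns a Future<RecordMetadata>,
     * so the loop above is effectively fire-and-forget once each record is queued. A
     * minimal sketch of a synchronous variant, if per-record confirmation were ever
     * needed (not what this class currently does; imports for Future and
     * RecordMetadata omitted):
     *
     *   Future<RecordMetadata> ack = fProducer.send(data);
     *   ack.get(); // blocks until the broker acknowledges or the send fails
     */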

    private Producer<String, String> fProducer;

    /**
     * Copies a producer setting into the given Properties: the value of
     * "kafka." + key from the message router property file when present,
     * otherwise the supplied default.
     * 
     * @param props
     * @param key
     * @param defVal
     */
    private void transferSetting(Properties props, String key, String defVal) {
        String kafkaProp = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kafka." + key);
        if (null == kafkaProp) {
            kafkaProp = defVal;
        }
        props.put(key, kafkaProp);
    }
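
    /*
     * A small illustration of the lookup above, assuming the property file contains
     * kafka.request.required.acks=all (the value "all" is only an example):
     *
     *   Properties p = new Properties();
     *   transferSetting(p, "request.required.acks", "1");
     *   // p.getProperty("request.required.acks") -> "all"; without the override -> "1"
     */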

    private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);

    /**
     * Interface-required overload; currently a no-op stub (the active path is
     * sendMessagesNew(String, List)).
     */
    @Override
    public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
        // TODO Auto-generated method stub
    }

}