1 /*******************************************************************************
2 * ============LICENSE_START=======================================================
4 * ================================================================================
5 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
6 * ================================================================================
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 * http://www.apache.org/licenses/LICENSE-2.0
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 * ============LICENSE_END=========================================================
19 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
21 *******************************************************************************/
22 package com.att.dmf.mr.backends.kafka;
24 import java.io.IOException;
25 import java.util.ArrayList;
26 import java.util.LinkedList;
27 import java.util.List;
28 import java.util.Properties;
30 import org.apache.kafka.clients.producer.KafkaProducer;
31 import org.apache.kafka.clients.producer.Producer;
32 import org.apache.kafka.clients.producer.ProducerRecord;
33 import org.json.JSONException;
34 import org.springframework.beans.factory.annotation.Qualifier;
36 import com.att.dmf.mr.backends.Publisher;
37 import com.att.dmf.mr.constants.CambriaConstants;
38 import com.att.dmf.mr.utils.Utils;
39 //import org.slf4j.Logger;
40 //import org.slf4j.LoggerFactory;
41 import com.att.eelf.configuration.EELFLogger;
42 import com.att.eelf.configuration.EELFManager;
43 import com.att.nsa.drumlin.till.nv.rrNvReadable;
/**
 * Sends raw JSON objects into Kafka.
 *
 * Could improve space: BSON rather than JSON?
 */
56 public class KafkaPublisher implements Publisher {
/**
 * Constructor: builds the shared Kafka producer from application properties.
 *
 * All producer settings are read from the AJSC property map
 * (CambriaConstants.msgRtr_prop); the injected settings object is retained
 * only for interface compatibility and is not read here.
 *
 * @param settings property reader (currently unused; configuration comes from AJSCPropertiesMap)
 * @throws rrNvReadable.missingReqdSetting declared for API compatibility
 */
public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
    final Properties props = new Properties();

    // Broker list comes from the property map; fall back to a local broker.
    String kafkaConnUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(
            CambriaConstants.msgRtr_prop, "kafka.metadata.broker.list");
    if (null == kafkaConnUrl) {
        kafkaConnUrl = "localhost:9092";
    }

    // SASL/PLAIN security settings are only applied when CADI is enabled.
    if (Utils.isCadiEnabled()) {
        transferSetting(props, "sasl.jaas.config",
                "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"
                        + Utils.getKafkaproperty() + "';");
        transferSetting(props, "security.protocol", "SASL_PLAINTEXT");
        transferSetting(props, "sasl.mechanism", "PLAIN");
    }

    // Common producer settings (each overridable via "kafka.<key>" properties).
    transferSetting(props, "bootstrap.servers", kafkaConnUrl);
    transferSetting(props, "request.required.acks", "1");
    transferSetting(props, "message.send.max.retries", "5");
    transferSetting(props, "retry.backoff.ms", "150");

    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

    fProducer = new KafkaProducer<>(props);
}
100 * Send a message with a given topic and key.
103 * @throws FailedToSendMessageException
104 * @throws JSONException
107 public void sendMessage(String topic, message msg) throws IOException{
108 final List<message> msgs = new LinkedList<message>();
110 sendMessages(topic, msgs);
/**
 * Publish a batch of messages using the legacy producer API.
 *
 * NOTE(review): {@code KeyedMessage} and {@code FailedToSendMessageException}
 * belong to the old kafka 0.8 producer API and are not imported in this file;
 * per the upgrade note this method appears to be a leftover from the
 * 0.8-to-0.11 migration and likely does not compile as-is -- confirm against
 * the full file and the Publisher interface.
 *
 * This chunk is truncated: the try-block opener and the loop that submits
 * each KeyedMessage are not visible here.
 */
public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
    // (truncated: try { ... submit each KeyedMessage ... )
    } catch (FailedToSendMessageException excp) {
        log.error("Failed to send message(s) to topic [" + topic + "].", excp);
        throw new FailedToSendMessageException(excp.getMessage(), excp);
133 * Kafka 11.0 Interface
134 * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
136 public void sendBatchMessageNew(String topic, ArrayList <ProducerRecord<String,String>> kms) throws IOException {
138 for (ProducerRecord<String,String> km : kms) {
142 } catch (Exception excp) {
143 log.error("Failed to send message(s) to topic [" + topic + "].", excp);
144 throw new IOException(excp.getMessage(), excp);
/**
 * Send a set of messages. Each must have a "key" string value.
 *
 * NOTE(review): this implementation still builds kafka 0.8
 * {@code KeyedMessage} objects and throws
 * {@code FailedToSendMessageException}, neither of which exists in the 0.11
 * client imported above -- likely dead/legacy code from the upgrade; confirm
 * against the full file.
 *
 * This chunk is truncated: the statements that add each KeyedMessage to the
 * batch and perform the actual send are not visible here.
 */
public void sendMessages(String topic, List<? extends message> msgs)
        throws IOException, FailedToSendMessageException {
    log.info("sending " + msgs.size() + " events to [" + topic + "]");
    final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
    for (message o : msgs) {
        final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
    // (truncated: kms.add(data); ... send ... )
    } catch (FailedToSendMessageException excp) {
        log.error("Failed to send message(s) to topic [" + topic + "].", excp);
        throw new FailedToSendMessageException(excp.getMessage(), excp);
/**
 * Kafka 0.11 publish path: wraps each message in a ProducerRecord keyed by
 * {@code o.getKey()} and submits it to the shared producer.
 *
 * NOTE(review): the trailing {@code catch(Exception e){}} swallows every
 * failure silently, and the inner handler throws a raw
 * {@code java.lang.Exception} -- both should be revisited, since
 * log-and-swallow hides delivery failures from callers.
 *
 * This chunk is truncated (the method's opening brace / throws clause and
 * the try opener are not visible), so the exact try/catch nesting cannot be
 * confirmed from here.
 */
public void sendMessagesNew(String topic, List<? extends message> msgs)
    log.info("sending " + msgs.size() + " events to [" + topic + "]");
    // kms is presumably a leftover from a batched-send variant -- records are
    // sent individually below; verify against the full file.
    final List<ProducerRecord<String, String>> kms = new ArrayList<>(msgs.size());
    for (message o : msgs) {
        final ProducerRecord<String, String> data = new ProducerRecord<>(topic, o.getKey(), o.toString());
        // send is asynchronous; the returned Future is ignored here
        fProducer.send(data);
    } catch (Exception excp) {
        log.error("Failed to send message(s) to topic [" + topic + "].", excp);
        throw new Exception(excp.getMessage(), excp);
    }catch(Exception e){}
// Legacy settings holder, retired when configuration moved to AJSCPropertiesMap.
//private final rrNvReadable fSettings;

// Shared Kafka producer instance, created once in the constructor.
private Producer<String, String> fProducer;
204 * It sets the key value pair
210 private void transferSetting(Properties props, String key, String defVal) {
211 String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key);
212 if (null==kafka_prop) kafka_prop=defVal;
213 //props.put(key, settings.getString("kafka." + key, defVal));
214 props.put(key, kafka_prop);
// Previous SLF4J logger, kept for reference only:
//private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);

// EELF logger used for all info/error logging in this class.
private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
/**
 * NOTE(review): this overload has the same erasure as the
 * sendMessages(String, List) method declared above -- a class cannot compile
 * with both; confirm which declaration the full file actually keeps.
 * The body is an auto-generated no-op stub, and this chunk is truncated
 * before its closing brace.
 */
public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
    // TODO Auto-generated method stub