1 /*******************************************************************************
2 * ============LICENSE_START=======================================================
4 * ================================================================================
5 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
6 * ================================================================================
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 * http://www.apache.org/licenses/LICENSE-2.0
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 * ============LICENSE_END=========================================================
19 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
21 *******************************************************************************/
22 package com.att.dmf.mr.backends.kafka;
24 import java.io.IOException;
25 import java.util.ArrayList;
26 import java.util.LinkedList;
27 import java.util.List;
28 import java.util.Properties;
30 import org.apache.kafka.clients.producer.KafkaProducer;
31 import org.apache.kafka.clients.producer.Producer;
32 import org.apache.kafka.clients.producer.ProducerRecord;
33 import org.json.JSONException;
34 import org.springframework.beans.factory.annotation.Qualifier;
36 import com.att.dmf.mr.backends.Publisher;
37 import com.att.dmf.mr.constants.CambriaConstants;
38 //import org.slf4j.Logger;
39 //import org.slf4j.LoggerFactory;
40 import com.att.eelf.configuration.EELFLogger;
41 import com.att.eelf.configuration.EELFManager;
42 import com.att.nsa.drumlin.till.nv.rrNvReadable;
44 //import kafka.FailedToSendMessageException;
45 //import kafka.javaapi.producer.Producer;
46 //import kafka.producer.KeyedMessage;
47 //import kafka.producer.ProducerConfig;
48 //import kafka.producer.KeyedMessage;
51 * Sends raw JSON objects into Kafka.
53 * Could improve space: BSON rather than JSON?
59 public class KafkaPublisher implements Publisher {
/**
 * Builds the single shared Kafka producer used by all send methods.
 *
 * Broker, ack and retry settings are read from the MsgRtr property file via
 * AJSCPropertiesMap (keys prefixed {@code kafka.}), with hard-coded fallbacks
 * supplied through {@link #transferSetting(Properties, String, String)}.
 *
 * @param settings injected property reader; currently unused but retained so
 *                 existing Spring wiring keeps working
 * @throws rrNvReadable.missingReqdSetting declared for backward compatibility
 *                 with callers; not thrown by the current implementation
 */
public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
	final Properties props = new Properties();

	// Broker list from the MsgRtr property file; default to a local broker
	// when the property is absent.
	String kafkaConnUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(
			CambriaConstants.msgRtr_prop, "kafka.metadata.broker.list");
	if (null == kafkaConnUrl) {
		kafkaConnUrl = "localhost:9092";
	}

	transferSetting(props, "bootstrap.servers", kafkaConnUrl);
	transferSetting(props, "request.required.acks", "1");
	transferSetting(props, "message.send.max.retries", "5");
	transferSetting(props, "retry.backoff.ms", "150");

	// String keys/values only; messages are serialized JSON text.
	props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
	props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

	fProducer = new KafkaProducer<>(props);
}
103 * Send a message with a given topic and key.
106 * @throws FailedToSendMessageException
107 * @throws JSONException
110 public void sendMessage(String topic, message msg) throws IOException{
111 final List<message> msgs = new LinkedList<message>();
113 sendMessages(topic, msgs);
/*
 * Legacy batch publish using the Kafka 0.8 producer API.
 *
 * NOTE(review): KeyedMessage and FailedToSendMessageException belong to the
 * old kafka.producer / kafka.common packages whose imports are commented out
 * at the top of this file ("commented from 0.8 to 0.11 upgrade"), so this
 * method cannot compile as written; sendBatchMessageNew() is the 0.11
 * replacement. The try-block opening and the actual send call appear to be
 * missing from this view — confirm against the full file before touching.
 */
public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
} catch (FailedToSendMessageException excp) {
log.error("Failed to send message(s) to topic [" + topic + "].", excp);
throw new FailedToSendMessageException(excp.getMessage(), excp);
136 * Kafka 11.0 Interface
137 * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
139 public void sendBatchMessageNew(String topic, ArrayList <ProducerRecord<String,String>> kms) throws IOException {
141 for (ProducerRecord<String,String> km : kms) {
145 } catch (Exception excp) {
146 log.error("Failed to send message(s) to topic [" + topic + "].", excp);
147 throw new IOException(excp.getMessage(), excp);
/*
 * Legacy multi-message publish using the Kafka 0.8 producer API. Each message
 * must supply a "key" string via getKey().
 *
 * NOTE(review): KeyedMessage and FailedToSendMessageException come from the
 * commented-out Kafka 0.8 imports, so this method cannot compile as written;
 * sendMessagesNew() is the 0.11 replacement. The try-block opening and the
 * producer send call appear to be missing from this view — confirm against
 * the full file before modifying.
 */
public void sendMessages(String topic, List<? extends message> msgs)
throws IOException, FailedToSendMessageException {
log.info("sending " + msgs.size() + " events to [" + topic + "]");
final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
for (message o : msgs) {
// Record key = message key, value = serialized message text.
final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
} catch (FailedToSendMessageException excp) {
log.error("Failed to send message(s) to topic [" + topic + "].", excp);
throw new FailedToSendMessageException(excp.getMessage(), excp);
179 public void sendMessagesNew(String topic, List<? extends message> msgs)
181 log.info("sending " + msgs.size() + " events to [" + topic + "]");
183 final List<ProducerRecord<String, String>> kms = new ArrayList<ProducerRecord<String, String>>(msgs.size());
184 for (message o : msgs) {
186 final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, o.getKey(), o.toString());
191 fProducer.send(data);
193 } catch (Exception excp) {
194 log.error("Failed to send message(s) to topic [" + topic + "].", excp);
195 throw new Exception(excp.getMessage(), excp);
199 }catch(Exception e){}
//private final rrNvReadable fSettings;
//private ProducerConfig fConfig;
// Single producer instance created in the constructor and shared by all send
// methods. NOTE(review): no close()/shutdown call is visible in this view —
// confirm the producer is closed elsewhere, or resources will leak.
private Producer<String, String> fProducer;
207 * It sets the key value pair
213 private void transferSetting(Properties props, String key, String defVal) {
214 String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key);
215 if (null==kafka_prop) kafka_prop=defVal;
216 //props.put(key, settings.getString("kafka." + key, defVal));
217 props.put(key, kafka_prop);
//private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);
// EELF logger used for the info/error reporting in the send methods above.
private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
// NOTE(review): this overload has the same erasure as the sendMessages
// declared earlier in this class, so the two cannot coexist — this looks
// like an auto-generated interface stub left behind during the 0.8 -> 0.11
// upgrade. Confirm against the full file and remove or comment it out.
public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
// TODO Auto-generated method stub
//public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
// TODO Auto-generated method stub