74f4ef60bf2a62931d96da75889c9a9cba846fe7
[dmaap/messagerouter/msgrtr.git] src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
/*******************************************************************************
 *  ============LICENSE_START=======================================================
 *  org.onap.dmaap
 *  ================================================================================
 *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 *  ================================================================================
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *  ============LICENSE_END=========================================================
 *
 *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *
 *******************************************************************************/
package com.att.dmf.mr.beans;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.beans.factory.annotation.Qualifier;

import com.att.ajsc.filemonitor.AJSCPropertiesMap;
import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.backends.Consumer;
import com.att.dmf.mr.backends.ConsumerFactory;
import com.att.dmf.mr.backends.MetricsSet;
import com.att.dmf.mr.backends.kafka.Kafka011Consumer;
import com.att.dmf.mr.backends.kafka.Kafka011ConsumerUtil;
import com.att.dmf.mr.backends.kafka.KafkaConsumerCache;
import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
import com.att.dmf.mr.backends.kafka.LiveLockAvoidance;
import com.att.dmf.mr.constants.CambriaConstants;
import com.att.dmf.mr.utils.ConfigurationReader;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;

/**
 * Factory for Kafka-backed {@link Consumer} instances, with optional consumer
 * caching and live-lock avoidance, as configured via the MsgRtr property file.
 *
 * @author nilanjana.maity
 */
public class DMaaPKafkaConsumerFactory implements ConsumerFactory {

    private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);

    /**
     * Constructor initialization: reads the node identity, Kafka broker list,
     * and cache settings from the MsgRtr property file and, when caching is
     * enabled, starts the consumer cache and the live-lock avoidance watcher.
     *
     * @param metrics
     * @param curator
     * @param kafkaLiveLockAvoider
     * @throws missingReqdSetting
     * @throws KafkaConsumerCacheException
     * @throws UnknownHostException
     */
    public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
            @Qualifier("curator") CuratorFramework curator,
            @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider)
            throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException {

        String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
                CambriaConstants.kSetting_ApiNodeIdentifier);
        if (apiNodeId == null) {
            apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
        }

        log.info("This Cambria API Node identifies itself as [" + apiNodeId + "].");
        final String mode = CambriaConstants.DMAAP;

        fkafkaBrokers = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
                "kafka.metadata.broker.list");
        if (null == fkafkaBrokers) {
            fkafkaBrokers = "localhost:9092";
        }

        boolean kSetting_EnableCache = kDefault_IsCacheEnabled;
        String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
                "cambria.consumer.cache.enabled");
        if (null != strkSetting_EnableCache) {
            kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
        }

        final boolean isCacheEnabled = kSetting_EnableCache;

        fCache = isCacheEnabled ? KafkaConsumerCache.getInstance() : null;
        if (fCache != null) {
            fCache.setfMetrics(metrics);
            fCache.setfApiId(apiNodeId);
            fCache.startCache(mode, curator);
            if (kafkaLiveLockAvoider != null) {
                kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId));
                fkafkaLiveLockAvoider = kafkaLiveLockAvoider;
            }
        }
    }
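
    /*
     * Settings sketch: the property names below are exactly the keys the
     * constructor looks up; the values are illustrative only (assumptions,
     * not taken from this repository):
     *
     *   kafka.metadata.broker.list=kafkahost:9092     # falls back to localhost:9092
     *   cambria.consumer.cache.enabled=true           # falls back to kDefault_IsCacheEnabled
     *
     * The node identifier key is referenced via
     * CambriaConstants.kSetting_ApiNodeIdentifier and falls back to
     * <canonical hostname>:<kDefault_Port> when unset.
     */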

    /*
     * getConsumerFor
     *
     * @see
     * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String,
     * java.lang.String, java.lang.String, int, java.lang.String)
     *
     * Used by EventServiceImpl.getEvents() to obtain a Kafka consumer, either
     * from the consumer cache or by creating a new connection. It also gathers
     * the other consumer objects in the same consumer group and sets them on
     * the KafkaConsumer object; that list may be used when handling
     * poll-rebalancing issues.
     */
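    /*
     * Illustrative call (a sketch; the argument values are hypothetical):
     *
     *   Consumer c = factory.getConsumerFor("myTopic", "myGroup", "client-1", 15000, "203.0.113.10");
     */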
    @Override
    public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs,
            String remotehost) throws UnavailableException, CambriaApiException {
        Kafka011Consumer kc;

        // Serialize consumer creation per (topic, consumer group);
        // computeIfAbsent makes the lookup-or-create of the monitor atomic.
        final Object syncObject = synchash.computeIfAbsent(topic + consumerGroupName, k -> new Object());

        synchronized (syncObject) {
            try {
                kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null;
            } catch (KafkaConsumerCacheException e) {
                log.error("Error occurred in Kafka consumer caching: " + e + "  " + topic + "::" + consumerGroupName
                        + "::" + consumerId);
                throw new UnavailableException(e);
            }

            // Ideally, when the cache holds the consumer, the flow below is
            // skipped; if it didn't, create the consumer for the first time on
            // this node.
            if (kc == null) {

                log.info("No cached Kafka consumer for " + topic + "::" + consumerGroupName + "::" + consumerId
                        + "; creating a new one.");

                final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(),
                        "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
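                // e.g. /consumerFactory/myTopic/myGroup/client-1 -- one ZooKeeper
                // mutex per (topic, group, consumer) triple, so only one API node
                // creates a given consumer at a time.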
                boolean locked = false;

                try {

                    locked = ipLock.acquire(30, TimeUnit.SECONDS);
                    if (!locked) {
                        log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
                                + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost);
                        throw new UnavailableException(
                                "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
                                        + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost);
                    }

                    log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId
                            + "], on topic [" + topic + "].");

                    if (fCache != null) {
                        fCache.signalOwnership(topic, consumerGroupName, consumerId);
                    }

                    final Properties props = createConsumerConfig(topic, consumerGroupName, consumerId);
                    long fCreateTimeMs = System.currentTimeMillis();
                    KafkaConsumer<String, String> cc = new KafkaConsumer<>(props);
                    kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider);
                    log.info("Kafka consumer created in " + (System.currentTimeMillis() - fCreateTimeMs) + " ms");

                    if (fCache != null) {
                        fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc);
                    }

                } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
                    log.info(
                            "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId);
                    throw new UnavailableException("Couldn't connect to ZK.");
                } catch (KafkaConsumerCacheException e) {
                    log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage()
                            + " " + consumerGroupName + "/" + consumerId);
                } catch (UnavailableException u) {
                    log.info("Failed to acquire the consumer factory lock: " + u.getMessage() + " " + consumerGroupName
                            + "/" + consumerId);
                    throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u);
                } catch (Exception e) {
                    // Swallowed deliberately; kc stays null and the caller receives null.
                    log.error("Unexpected error while creating Kafka consumer: " + e.getMessage() + " "
                            + consumerGroupName + "/" + consumerId);
                } finally {
                    if (locked) {
                        try {
                            ipLock.release();
                        } catch (Exception e) {
                            throw new UnavailableException("Error while releasing consumer factory lock" + e, e);
                        }
                    }
                }
            }
        }
        return kc;
    }

    @Override
    public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) {
        if (fCache != null) {
            fCache.dropConsumer(topic, consumerGroup, clientId);
        }
    }

    @Override
    public synchronized Collection<? extends Consumer> getConsumers() {
        // Guard against a disabled cache, as destroyConsumer does.
        return (fCache != null) ? fCache.getConsumers() : Collections.emptyList();
    }

    @Override
    public synchronized void dropCache() {
        // Guard against a disabled cache, as destroyConsumer does.
        if (fCache != null) {
            fCache.dropAllConsumers();
        }
    }

    private KafkaConsumerCache fCache;
    private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider;
    private String fkafkaBrokers;

    private static String makeLongKey(String key, String prefix) {
        return prefix + "." + key;
    }

    private void transferSettingIfProvided(Properties target, String key, String prefix) {
        String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix));

        if (null != keyVal) {
            log.info("Setting [" + key + "] to " + keyVal + ".");
            target.put(key, keyVal);
        }
    }

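    // e.g. transferSettingIfProvided(props, "max.poll.records", "kafka") copies
    // the value of the "kafka.max.poll.records" property (when present in the
    // MsgRtr property file) into the consumer Properties as "max.poll.records".
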
    /**
     * Builds the Properties required to create the Kafka connection.
     *
     * The group name is rewritten in the form groupid--topic so that the same
     * group id can be used across multiple topics; a single group id shared
     * across topics may trigger frequent consumer rebalancing on all of them,
     * and the rewrite keeps each (group, topic) pair unique.
     *
     * @param topic
     * @param groupId
     * @param consumerId
     * @return Properties
     */
    private Properties createConsumerConfig(String topic, String groupId, String consumerId) {
        final Properties props = new Properties();
        // fakeGroupName avoids one consumer group spanning multiple topics. Do not change this logic.
        // Fix for CPFMF-644:
        final String fakeGroupName = groupId + "--" + topic;
        props.put("group.id", fakeGroupName);
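        // e.g. groupId "group1" on topic "topicA" is registered with Kafka as
        // "group1--topicA", so the same external group name on two topics maps
        // to two independent Kafka consumer groups.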
        props.put("enable.auto.commit", "false"); // 0.11
        props.put("bootstrap.servers", fkafkaBrokers);
        props.put("sasl.jaas.config",
                "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
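        // NOTE: the SASL/PLAIN credentials above are hardcoded in source; a
        // deployment would normally supply these through configuration instead.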

        props.put("client.id", consumerId);

        // additional settings: start with our defaults, then pull in configured
        // overrides
        populateKafkaInternalDefaultsMap();
        for (String key : KafkaConsumerKeys) {
            transferSettingIfProvided(props, key, "kafka");
        }

        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        return props;
    }

    private static final String KafkaConsumerKeys[] = { "bootstrap.servers", "heartbeat.interval.ms",
            "auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes",
            "auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level",
            "request.timeout.ms", "fetch.max.wait.ms", "reconnect.backoff.max.ms", "reconnect.backoff.ms",
            "retry.backoff.ms", "max.partition.fetch.bytes", "max.poll.interval.ms", "max.poll.records",
            "receive.buffer.bytes", "metadata.max.age.ms" };

    /**
     * Placeholder for seeding internal Kafka defaults (consumer timeout,
     * ZooKeeper timeout, etc.); currently a no-op.
     */
    private static void populateKafkaInternalDefaultsMap() { }

    /*
     * Builds the live-lock avoidance callback for this API node; the callback
     * forces a poll on the named consumer group when a rebalance unlock is
     * signaled. All the consumers it touches are local to this node.
     */
    private LiveLockAvoidance makeAvoidanceCallback(final String appId) {

        return new LiveLockAvoidance() {

            @Override
            public String getAppId() {
                return appId;
            }

            @Override
            public void handleRebalanceUnlock(String groupName) {
                log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName);
                Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::");
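                // The trailing "::" appears to act as a prefix match on the
                // cache's "group::consumerId" naming (an assumption based on
                // the key format used in this class's log messages), so every
                // local consumer in the group is covered.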
            }

        };

    }

    @SuppressWarnings("rawtypes")
    @Override
    public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
            String remotehost) throws UnavailableException, CambriaApiException {
        // TODO Auto-generated method stub
        return null;
    }

    private final ConcurrentHashMap<String, Object> synchash = new ConcurrentHashMap<>();

}