update the testcases after the kafka 11 changes 49/60549/1
authorsunil unnava <su622b@att.com>
Tue, 14 Aug 2018 13:34:46 +0000 (09:34 -0400)
committersunil unnava <su622b@att.com>
Tue, 14 Aug 2018 13:39:23 +0000 (09:39 -0400)
Issue-ID: DMAAP-526
Change-Id: I477a8ee05fb3cdd76af726b6ca0d1a69aa9eef93
Signed-off-by: sunil unnava <su622b@att.com>
209 files changed:
.project
pom.xml
src/main/java/com/att/dmf/mr/CambriaApiException.java [moved from src/main/java/com/att/nsa/cambria/CambriaApiException.java with 96% similarity]
src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java [moved from src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java with 98% similarity]
src/main/java/com/att/dmf/mr/backends/Consumer.java [moved from src/main/java/com/att/nsa/cambria/backends/Consumer.java with 88% similarity]
src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java [moved from src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java with 83% similarity]
src/main/java/com/att/dmf/mr/backends/MetricsSet.java [moved from src/main/java/com/att/nsa/cambria/backends/MetricsSet.java with 97% similarity]
src/main/java/com/att/dmf/mr/backends/Publisher.java [moved from src/main/java/com/att/nsa/cambria/backends/Publisher.java with 84% similarity]
src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt [new file with mode: 0644]
src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java [moved from src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java with 64% similarity]
src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java [moved from src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java with 61% similarity]
src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java [moved from src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java with 82% similarity]
src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java [moved from src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java with 96% similarity]
src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java [moved from src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java with 95% similarity]
src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java [moved from src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java with 83% similarity]
src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java [moved from src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java with 85% similarity]
src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java [moved from src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java with 97% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java with 63% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPContext.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java with 95% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java with 71% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java with 97% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java with 97% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java with 94% similarity]
src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java [moved from src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java with 93% similarity]
src/main/java/com/att/dmf/mr/beans/LogDetails.java [moved from src/main/java/com/att/nsa/cambria/beans/LogDetails.java with 97% similarity]
src/main/java/com/att/dmf/mr/beans/TopicBean.java [moved from src/main/java/com/att/nsa/cambria/beans/TopicBean.java with 96% similarity]
src/main/java/com/att/dmf/mr/constants/CambriaConstants.java [moved from src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java with 93% similarity]
src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java [moved from src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java with 94% similarity]
src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java [moved from src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java with 96% similarity]
src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java [moved from src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java with 84% similarity]
src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java [moved from src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java with 97% similarity]
src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java [moved from src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java with 98% similarity]
src/main/java/com/att/dmf/mr/exception/ErrorResponse.java [moved from src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java with 98% similarity]
src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java [moved from src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java with 97% similarity]
src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java [moved from src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java with 97% similarity]
src/main/java/com/att/dmf/mr/metabroker/Broker.java [moved from src/main/java/com/att/nsa/cambria/metabroker/Broker.java with 92% similarity]
src/main/java/com/att/dmf/mr/metabroker/Broker1.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/metabroker/Topic.java [moved from src/main/java/com/att/nsa/cambria/metabroker/Topic.java with 97% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java with 96% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java with 97% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java with 96% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java with 97% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java with 97% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java with 96% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java with 83% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java with 90% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java with 94% similarity]
src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java [moved from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java with 93% similarity]
src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java [moved from src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java with 87% similarity]
src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java [moved from src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java with 50% similarity]
src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java [moved from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java with 89% similarity]
src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java [moved from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java with 92% similarity]
src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java [moved from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java with 95% similarity]
src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java [moved from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java with 89% similarity]
src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java [moved from src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java with 93% similarity]
src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java [moved from src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java with 61% similarity]
src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java [moved from src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java with 95% similarity]
src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java [moved from src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java with 90% similarity]
src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java [moved from src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java with 94% similarity]
src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java [moved from src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java with 98% similarity]
src/main/java/com/att/dmf/mr/service/AdminService.java [moved from src/main/java/com/att/nsa/cambria/service/AdminService.java with 96% similarity]
src/main/java/com/att/dmf/mr/service/ApiKeysService.java [moved from src/main/java/com/att/nsa/cambria/service/ApiKeysService.java with 96% similarity]
src/main/java/com/att/dmf/mr/service/EventsService.java [moved from src/main/java/com/att/nsa/cambria/service/EventsService.java with 89% similarity]
src/main/java/com/att/dmf/mr/service/MMService.java [moved from src/main/java/com/att/nsa/cambria/service/MMService.java with 87% similarity]
src/main/java/com/att/dmf/mr/service/MetricsService.java [moved from src/main/java/com/att/nsa/cambria/service/MetricsService.java with 90% similarity]
src/main/java/com/att/dmf/mr/service/TopicService.java [moved from src/main/java/com/att/nsa/cambria/service/TopicService.java with 95% similarity]
src/main/java/com/att/dmf/mr/service/TransactionService.java [moved from src/main/java/com/att/nsa/cambria/service/TransactionService.java with 92% similarity]
src/main/java/com/att/dmf/mr/service/UIService.java [moved from src/main/java/com/att/nsa/cambria/service/UIService.java with 93% similarity]
src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java with 90% similarity]
src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java with 94% similarity]
src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java with 93% similarity]
src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java [new file with mode: 0644]
src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java with 90% similarity]
src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java with 91% similarity]
src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java with 57% similarity]
src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java with 94% similarity]
src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java [moved from src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java with 93% similarity]
src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java [moved from src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java with 95% similarity]
src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java [moved from src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java with 97% similarity]
src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java [moved from src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java with 96% similarity]
src/main/java/com/att/dmf/mr/transaction/TransactionObj.java [moved from src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java with 98% similarity]
src/main/java/com/att/dmf/mr/transaction/TrnRequest.java [moved from src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java with 98% similarity]
src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java [moved from src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java with 89% similarity]
src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java [moved from src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java with 89% similarity]
src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java [moved from src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java with 96% similarity]
src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java [moved from src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java with 96% similarity]
src/main/java/com/att/dmf/mr/utils/Emailer.java [moved from src/main/java/com/att/nsa/cambria/utils/Emailer.java with 97% similarity]
src/main/java/com/att/dmf/mr/utils/PropertyReader.java [moved from src/main/java/com/att/nsa/cambria/utils/PropertyReader.java with 91% similarity]
src/main/java/com/att/dmf/mr/utils/Utils.java [moved from src/main/java/com/att/nsa/cambria/utils/Utils.java with 97% similarity]
src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java [moved from src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java with 90% similarity]
src/main/java/com/att/mr/filter/ContentLengthFilter.java [moved from src/main/java/com/att/nsa/filter/ContentLengthFilter.java with 94% similarity]
src/main/java/com/att/mr/filter/DefaultLength.java [moved from src/main/java/com/att/nsa/filter/DefaultLength.java with 97% similarity]
src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java [deleted file]
src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java [deleted file]
src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java [deleted file]
src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java [deleted file]
src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java [deleted file]
src/test/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java
src/test/java/com/att/nsa/cambria/CambriaApiExceptionTest.java
src/test/java/com/att/nsa/cambria/CambriaApiVersionInfoTest.java
src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java
src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java
src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerTest.java [deleted file]
src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java
src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java
src/test/java/com/att/nsa/cambria/backends/memory/JUnitTestSuite.java
src/test/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactoryTest.java
src/test/java/com/att/nsa/cambria/backends/memory/MemoryMetaBrokerTest.java
src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisherTest.java
src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueueTest.java
src/test/java/com/att/nsa/cambria/backends/memory/MessageDropperTest.java [deleted file]
src/test/java/com/att/nsa/cambria/backends/memory/MessageLoggerTest.java
src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest.java
src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest2.java
src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest3.java
src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest4.java
src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest5.java
src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest6.java
src/test/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiterTest.java
src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest.java
src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest2.java
src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest3.java
src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest4.java
src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest5.java
src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest6.java
src/test/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBrokerTest.java
src/test/java/com/att/nsa/cambria/beans/JUnitTestSuite.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest10.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest11.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest12.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest13.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest14.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest15.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest16.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest17.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest18.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest2.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest3.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest4.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest5.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest6.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest7.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest8.java
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest9.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest10.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest2.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest3.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest4.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest5.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest6.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest7.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest8.java
src/test/java/com/att/nsa/cambria/beans/TopicBeanTest9.java
src/test/java/com/att/nsa/cambria/embed/EmbedConfigurationReader.java
src/test/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapperTest.java
src/test/java/com/att/nsa/cambria/exception/DMaaPErrorMessagesTest.java
src/test/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapperTest.java
src/test/java/com/att/nsa/cambria/exception/ErrorResponseTest.java
src/test/java/com/att/nsa/cambria/listener/CambriaServletContextListenerTest.java
src/test/java/com/att/nsa/cambria/listener/DME2EndPointLoaderTest.java
src/test/java/com/att/nsa/cambria/metabroker/BrokerImpl.java
src/test/java/com/att/nsa/cambria/metabroker/BrokerImplTest.java
src/test/java/com/att/nsa/cambria/metabroker/TopicImplem.java
src/test/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtilityTest.java
src/test/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java
src/test/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClientTest.java
src/test/java/com/att/nsa/cambria/metrics/publisher/impl/ClockTest.java
src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java
src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java
src/test/java/com/att/nsa/cambria/resources/CambriaEventSetTest.java
src/test/java/com/att/nsa/cambria/resources/CambriaOutboundEventStreamTest.java
src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java
src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java
src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReaderTest.java
src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java
src/test/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImplTest.java
src/test/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImplTest.java
src/test/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java
src/test/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java
src/test/java/com/att/nsa/cambria/service/impl/AdminServiceImplemTest.java
src/test/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/EventsServiceImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/JUnitTestSuite.java
src/test/java/com/att/nsa/cambria/service/impl/MMServiceImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/MessageTest.java
src/test/java/com/att/nsa/cambria/service/impl/MetricsServiceImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/ShowConsumerCacheTest.java
src/test/java/com/att/nsa/cambria/service/impl/TopicServiceImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/TransactionServiceImplTest.java
src/test/java/com/att/nsa/cambria/service/impl/UIServiceImplTest.java
src/test/java/com/att/nsa/cambria/transaction/TransactionObjTest.java
src/test/java/com/att/nsa/cambria/transaction/TrnRequestTest.java
src/test/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java
src/test/java/com/att/nsa/cambria/utils/ConfigurationReaderTest.java
src/test/java/com/att/nsa/cambria/utils/DMaaPCuratorFactoryTest.java
src/test/java/com/att/nsa/cambria/utils/DMaaPResponseBuilderTest.java
src/test/java/com/att/nsa/cambria/utils/UtilsTest.java
src/test/java/com/att/nsa/filter/ContentLengthFilterTest.java
src/test/java/com/att/nsa/filter/DefaultLengthTest.java
version.properties

index 6d37451..06dc78d 100644 (file)
--- a/.project
+++ b/.project
@@ -5,6 +5,11 @@
        <projects>\r
        </projects>\r
        <buildSpec>\r
+               <buildCommand>\r
+                       <name>org.eclipse.wst.common.project.facet.core.builder</name>\r
+                       <arguments>\r
+                       </arguments>\r
+               </buildCommand>\r
                <buildCommand>\r
                        <name>org.eclipse.jdt.core.javabuilder</name>\r
                        <arguments>\r
@@ -36,5 +41,6 @@
                <nature>net.sf.eclipsecs.core.CheckstyleNature</nature>\r
                <nature>org.eclipse.jdt.core.javanature</nature>\r
                <nature>org.eclipse.m2e.core.maven2Nature</nature>\r
+               <nature>org.eclipse.wst.common.project.facet.core.nature</nature>\r
        </natures>\r
 </projectDescription>\r
diff --git a/pom.xml b/pom.xml
index 40f3ce8..870b08b 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -14,7 +14,7 @@
        <modelVersion>4.0.0</modelVersion>
        <groupId>org.onap.dmaap.messagerouter.msgrtr</groupId>
        <artifactId>msgrtr</artifactId>
-       <version>1.1.4-SNAPSHOT</version>
+       <version>1.1.5-SNAPSHOT</version>
        <packaging>jar</packaging>
        <name>dmaap-messagerouter-msgrtr</name>
        <description>Message Router - Restful interface built for kafka</description>
                <!-- Apache Kafka -->
                <dependency>
                        <groupId>org.apache.kafka</groupId>
-                       <artifactId>kafka_2.10</artifactId>
-                       <version>0.8.2.1</version>
+                       <artifactId>kafka_2.11</artifactId>
+               <version>0.11.0.0</version>
                </dependency>
 
                <dependency>
@@ -8,23 +8,23 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria;
+package com.att.dmf.mr;
 
 import org.json.JSONObject;
 
+import com.att.dmf.mr.exception.ErrorResponse;
 import com.att.nsa.apiServer.NsaAppException;
-import com.att.nsa.cambria.exception.ErrorResponse;
 
 public class CambriaApiException extends NsaAppException
 {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria;
+package com.att.dmf.mr;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -30,7 +30,7 @@ import com.att.eelf.configuration.EELFManager;
 /**
  * CambriaApiVersionInfo will provide the version of cambria code
  * 
- * @author author
+ * @author peter
  *
  */
 public class CambriaApiVersionInfo {
@@ -8,28 +8,30 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
+
+import java.util.ArrayList;
 
 /**
  * A consumer interface. Consumers pull the next message from a given topic.
- * @author author
+ * @author peter
  */
 public interface Consumer
 {      
        /**
         * A message interface provide the offset and message
-        * @author author
+        * @author nilanjana.maity
         *
         */
        public interface Message
@@ -75,12 +77,14 @@ public interface Consumer
         * @param atOffset start with the next message at or after atOffset. -1 means next from last request
         * @return the next message, or null if none are waiting
         */
+//     Message nextMessage ( long atOffset );
 
-
+       //Message nextMessage (ArrayList cl);
        /**
         * Close/clean up this consumer
+        * @return 
         */
-       void close();
+       boolean close();
        
        /**
         * Commit the offset of the last consumed message
@@ -93,4 +97,10 @@ public interface Consumer
         * @return offset
         */
        long getOffset();
+       
+       void setOffset(long offset);
+       
+       //public        org.apache.kafka.clients.consumer.KafkaConsumer getConsumer();
+       
+       
 }
@@ -8,25 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
 
 import java.util.Collection;
+import java.util.HashMap;
+
+import com.att.dmf.mr.CambriaApiException;
 
 /**
  * This is the factory class to instantiate the consumer
  * 
- * @author author
+ * @author nilanjana.maity
  *
  */
 
@@ -37,7 +40,7 @@ public interface ConsumerFactory {
        /**
         * User defined exception for Unavailable Exception
         * 
-        * @author author
+        * @author nilanjana.maity
         *
         */
        public class UnavailableException extends Exception {
@@ -87,8 +90,8 @@ public interface ConsumerFactory {
         * @return
         * @throws UnavailableException
         */
-       public Consumer getConsumerFor(String topic, String consumerGroupId,
-                       String clientId, int timeoutMs) throws UnavailableException;
+       //public Consumer getConsumerFor(String topic, String consumerGroupId,
+               //      String clientId, int timeoutMs) throws UnavailableException;
 
        /**
         * For factories that employ a caching mechanism, this allows callers to
@@ -107,4 +110,10 @@ public interface ConsumerFactory {
         * @return a collection of consumers
         */
        public Collection<? extends Consumer> getConsumers();
+
+       public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
+       public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
+
+
+       
 }
@@ -8,23 +8,23 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
 
 import com.att.nsa.metrics.CdmMetricsRegistry;
 /**
  * This interface will help to generate metrics
- * @author author
+ * @author nilanjana.maity
  *
  */
 public interface MetricsSet extends CdmMetricsRegistry{
@@ -8,36 +8,37 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import kafka.producer.KeyedMessage;
+import org.apache.kafka.clients.producer.ProducerRecord;
 
-import com.att.nsa.cambria.beans.LogDetails;
+import com.att.dmf.mr.beans.LogDetails;
 
+//import kafka.producer.KeyedMessage;
 /**
  * A publisher interface. Publishers receive messages and post them to a topic.
- * @author author
+ * @author peter
  */
 public interface Publisher
 {
        /**
         * A message interface. The message has a key and a body.
-        * @author author
+        * @author peter
         */
        public interface message
        {
@@ -94,5 +95,7 @@ public interface Publisher
         */
        public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
        
-       public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
+       //public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
+       public void sendBatchMessageNew(String topic ,ArrayList<ProducerRecord<String,String>> kms) throws IOException;
+       public void sendMessagesNew( String topic, List<? extends message> msgs ) throws IOException;
 }
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java
new file mode 100644 (file)
index 0000000..f7f5ba7
--- /dev/null
@@ -0,0 +1,403 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.KafkaException;
+
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.constants.CambriaConstants;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ * 
+ * @author Ram
+ *
+ */
+public class Kafka011Consumer implements Consumer {
+       private enum State {
+               OPENED, CLOSED
+       }
+
+       // @Autowired
+       // KafkaLiveLockAvoider kafkaLiveLockAvoider;
+       /**
+        * KafkaConsumer() is constructor. It has following 4 parameters:-
+        * 
+        * @param topic
+        * @param group
+        * @param id
+        * @param cc
+        * 
+        */
+
+       public Kafka011Consumer(String topic, String group, String id, KafkaConsumer<String, String> cc,
+                       KafkaLiveLockAvoider2 klla) throws Exception {
+               fTopic = topic;
+               fGroup = group;
+               fId = id;
+               fCreateTimeMs = System.currentTimeMillis();
+               fLastTouch = fCreateTimeMs;
+               fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String, String>>();
+               fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+               offset = 0;
+               state = Kafka011Consumer.State.OPENED;
+               kConsumer = cc;
+               fKafkaLiveLockAvoider = klla;
+               synchronized (kConsumer) {
+                       kConsumer.subscribe(Arrays.asList(topic));
+               }
+       }
+
+       private Consumer.Message makeMessage(final ConsumerRecord<String, String> msg) {
+               return new Consumer.Message() {
+                       @Override
+                       public long getOffset() {
+                               offset = msg.offset();
+                               return offset;
+                       }
+
+                       @Override
+                       public String getMessage() {
+                               return new String(msg.value());
+                       }
+               };
+       }
+
+       @Override
+       public synchronized Consumer.Message nextMessage() {
+
+               try {
+                       if (fPendingMsgs.size() > 0) {
+                               return makeMessage(fPendingMsgs.take());
+                       }
+               } catch (InterruptedException x) {
+                       log.warn("After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage() + ")",
+                                       x);
+               }
+
+               Callable<Boolean> run = new Callable<Boolean>() {
+                       @Override
+                       public Boolean call() throws Exception {
+                               try {
+                                       ConsumerRecords<String, String> records;
+                                       synchronized (kConsumer) {
+                                               records = kConsumer.poll(500);
+                                       }
+                                       for (ConsumerRecord<String, String> record : records) {
+                                               // foundMsgs = true;
+                                               fPendingMsgs.offer(record);
+                                       }
+
+                               } catch (KafkaException x) {
+                                       log.debug(fLogTag + ": KafkaException " + x.getMessage());
+
+                               } catch (java.lang.IllegalStateException | java.lang.IllegalArgumentException x) {
+                                       log.error(fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. "
+                                                       + x.getMessage());
+
+                               }
+
+                               // return null;
+                               return true;
+                       }
+               };
+
+               @SuppressWarnings({ "rawtypes", "unchecked" })
+               RunnableFuture future = new FutureTask(run);
+               ExecutorService service = Executors.newSingleThreadExecutor();
+               service.execute(future);
+               try {
+                       future.get(5, TimeUnit.SECONDS); // wait 1
+                       // second
+               } catch (TimeoutException ex) {
+                       // timed out. Try to stop the code if possible.
+                       String apiNodeId = null;
+                       try {
+                               apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
+                       } catch (UnknownHostException e1) {
+                               // TODO Auto-generated catch block
+                               e1.printStackTrace();
+                       }
+
+                       try {
+                               if (fKafkaLiveLockAvoider != null)
+                                       fKafkaLiveLockAvoider.unlockConsumerGroup(apiNodeId, fTopic + "::" + fGroup);
+                       } catch (Exception e) {
+                               log.error("unlockConsumerGroup(" + apiNodeId + "," + fTopic + "::" + fGroup);
+                       }
+
+                       forcePollOnConsumer();
+                       future.cancel(true);
+               } catch (Exception ex) {
+                       // timed out. Try to stop the code if possible.
+                       future.cancel(true);
+               }
+               service.shutdown();
+
+               return null;
+
+       }
+
+       /**
+        * getName() method returns string type value. returns 3 parameters in
+        * string:- fTopic,fGroup,fId
+        * 
+        * @Override
+        */
+       public String getName() {
+               return fTopic + " : " + fGroup + " : " + fId;
+       }
+
+       /**
+        * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
+        * variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getCreateTimeMs() {
+               return fCreateTimeMs;
+       }
+
+       public org.apache.kafka.clients.consumer.KafkaConsumer<String, String> getConsumer() {
+               return kConsumer;
+       }
+
+       /**
+        * getLastAccessMs() method returns long type value. returns fLastTouch
+        * variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getLastAccessMs() {
+               return fLastTouch;
+       }
+
+       /**
+        * getOffset() method returns long type value. returns offset variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getOffset() {
+               return offset;
+       }
+
+       /**
+        * commit offsets commitOffsets() method will be called on closed of
+        * KafkaConsumer.
+        * 
+        * @Override
+        * 
+        *
+        *                      public void commitOffsets() { if (getState() ==
+        *           KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
+        *           on closed KafkaConsumer " + getName()); return; }
+        *           fConnector.commitOffsets(); }
+        */
+
+       /**
+        * updating fLastTouch with current time in ms
+        */
+       public void touch() {
+               fLastTouch = System.currentTimeMillis();
+       }
+
+       /**
+        * getLastTouch() method returns long type value. returns fLastTouch
+        * variable value
+        * 
+        */
+       public long getLastTouch() {
+               return fLastTouch;
+       }
+
+       /**
+        * setting the kafkaConsumer state to closed
+        */
+       // public synchronized boolean close() {
+       public boolean close() {
+               if (getState() == Kafka011Consumer.State.CLOSED) {
+
+                       log.error("close() called on closed KafkaConsumer " + getName());
+                       return true;
+               }
+
+               // fConnector.shutdown();
+               boolean retVal = kafkaConnectorshuttask();
+               return retVal;
+
+       }
+
+       /* time out if the kafka shutdown fails for some reason */
+
+       private boolean kafkaConnectorshuttask() {
+               Callable<Boolean> run = new Callable<Boolean>() {
+                       @Override
+                       public Boolean call() throws Exception {
+
+                               try {
+                                       // System.out.println("attempt to delete " + kConsumer);
+                                       kConsumer.close();
+
+                               } catch (Exception e) {
+                                       log.info("@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
+                                       throw new Exception("@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
+                                       // return false;
+                               }
+                               log.info("Kafka connection closure with in 15 seconds by a Executors task");
+
+                               return true;
+                       }
+               };
+
+               @SuppressWarnings({ "rawtypes", "unchecked" })
+               RunnableFuture future = new FutureTask(run);
+               ExecutorService service = Executors.newSingleThreadExecutor();
+               service.execute(future);
+               try {
+                  future.get(200, TimeUnit.SECONDS); // wait 1
+                       // second
+               } catch (TimeoutException ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.info("Timeout Occured - Kafka connection closure with in 300 seconds by a Executors task");
+                       future.cancel(true);
+                       setState(Kafka011Consumer.State.OPENED);
+               } catch (Exception ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.error("Exception occured Occured - Kafka connection closure with in 300 seconds by a Executors task"
+                                       + ex);
+                       future.cancel(true);
+                       setState(Kafka011Consumer.State.OPENED);
+                       return false;
+               }
+               service.shutdown();
+               setState(Kafka011Consumer.State.CLOSED);
+               return true;
+       }
+
+       public void forcePollOnConsumer() {
+               Kafka011ConsumerUtil.forcePollOnConsumer(fTopic, fGroup, fId);
+
+       }
+
+       /**
+        * getConsumerGroup() returns Consumer group
+        * 
+        * @return
+        */
+       public String getConsumerGroup() {
+               return fGroup;
+       }
+
+       /**
+        * getConsumerId returns Consumer Id
+        * 
+        * @return
+        */
+       public String getConsumerId() {
+               return fId;
+       }
+
+       /**
+        * getState returns kafkaconsumer state
+        * 
+        * @return
+        */
+       private Kafka011Consumer.State getState() {
+               return this.state;
+       }
+
+       /**
+        * setState() sets the kafkaConsumer state
+        * 
+        * @param state
+        */
+       private void setState(Kafka011Consumer.State state) {
+               this.state = state;
+       }
+
+       // private ConsumerConnector fConnector;
+       private final String fTopic;
+       private final String fGroup;
+       private final String fId;
+       private final String fLogTag;
+       // private final KafkaStream<byte[], byte[]> fStream;
+       private KafkaConsumer<String, String> kConsumer;
+       private long fCreateTimeMs;
+       private long fLastTouch;
+       private long offset;
+       private Kafka011Consumer.State state;
+       private KafkaLiveLockAvoider2 fKafkaLiveLockAvoider;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011Consumer.class);
+       private final LinkedBlockingQueue<ConsumerRecord<String, String>> fPendingMsgs;
+       //private ArrayList<Kafka011Consumer> fconsumerList;
+       @Override
+       public void commitOffsets() {
+               if (getState() == Kafka011Consumer.State.CLOSED) {
+                       log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+                       return;
+               }
+               kConsumer.commitSync();
+               // fConsumer.close();
+
+       }
+
+       @Override
+       public void setOffset(long offsetval) {
+               offset = offsetval;
+       }
+
+       
+       public void setConsumerCache(KafkaConsumerCache cache) {
+       }
+
+       //@Override
+       //public Message nextMessage(ArrayList<?> l) {
+               // TODO Auto-generated method stub
+               //return null;
+       //}
+}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java
new file mode 100644 (file)
index 0000000..ea9407b
--- /dev/null
@@ -0,0 +1,126 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.ArrayList;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer Util class for force polling when a rebalance issue is anticipated
+ * 
+ * @author Ram
+ *
+ */
+public class Kafka011ConsumerUtil {
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011ConsumerUtil.class);
+
+       /**
+        * @param fconsumercache
+        * @param fTopic
+        * @param fGroup
+        * @param fId
+        * @return
+        */
+       public static boolean forcePollOnConsumer(final String fTopic, final String fGroup, final String fId) {
+
+               Thread forcepollThread = new Thread(new Runnable() {
+                       public void run() {
+                               try {
+
+                                       ArrayList<Kafka011Consumer> kcsList = null;
+
+                                       kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(fTopic + "::" + fGroup + "::", fId);
+                                       if (null != kcsList) {
+                                               for (int counter = 0; counter < kcsList.size(); counter++) {
+
+                                                       Kafka011Consumer kc1 = kcsList.get(counter);
+
+                                                       try {
+                                                               ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
+                                                               log.info("soft poll on " + kc1);
+                                                       } catch (java.util.ConcurrentModificationException e) {
+                                                               log.error("Error occurs for " + e);
+                                                       }
+
+                                               }
+
+                                       }
+
+                               } catch (Exception e) {
+                                       log.error("Failed and go to Exception block for " + fGroup + " " + e.getMessage());
+                                       e.printStackTrace();
+                               }
+                       }
+               });
+
+               forcepollThread.start();
+
+               return false;
+
+       }
+
+       /**
+        * @param fconsumercache
+        * @param group
+        * @return
+        */
+       public static boolean forcePollOnConsumer(final String group) {
+
+               Thread forcepollThread = new Thread(new Runnable() {
+                       public void run() {
+                               try {
+                                       ArrayList<Kafka011Consumer> kcsList = new ArrayList<Kafka011Consumer>();
+                                       kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(group);
+
+                                       if (null != kcsList) {
+
+                                               for (int counter = 0; counter < kcsList.size(); counter++) {
+
+                                                       Kafka011Consumer kc1 = kcsList.get(counter);
+                                                       log.info("soft poll on remote nodes " + kc1);
+                                                       ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
+                                               }
+
+                                       }
+
+                               } catch (java.util.ConcurrentModificationException e) {
+                                       log.error("Error occurs for " + e);
+                               } catch (Exception e) {
+                                       log.error("Failed and go to Exception block for " + group + " " + e.getMessage());
+                                       e.printStackTrace();
+                               }
+                       }
+               });
+
+               forcepollThread.start();
+               return false;
+
+       }
+
+}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt
new file mode 100644 (file)
index 0000000..dd6259f
--- /dev/null
@@ -0,0 +1,386 @@
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.Arrays;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.KafkaException;
+
+import com.att.dmf.mr.backends.Consumer;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ * 
+ * @author peter
+ *
+ */
+public class KafkaConsumer implements Consumer {
+       private enum State {
+               OPENED, CLOSED
+       }
+
+       /**
+        * KafkaConsumer() is constructor. It has following 4 parameters:-
+        * 
+        * @param topic
+        * @param group
+        * @param id
+        * @param cc
+        * 
+        */
+
+       public KafkaConsumer(String topic, String group, String id, Properties prop) throws Exception {
+               fTopic = topic;
+               fGroup = group;
+               fId = id;
+               // fConnector = cc;
+
+               fCreateTimeMs = System.currentTimeMillis();
+               fLastTouch = fCreateTimeMs;
+               fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String,String>> ();
+               fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+               offset = 0;
+
+               state = KafkaConsumer.State.OPENED;
+
+               // final Map<String, Integer> topicCountMap = new HashMap<String,
+               // Integer>();
+               // topicCountMap.put(fTopic, 1);
+               // log.info(fLogTag +" kafka Consumer started at "
+               // +System.currentTimeMillis());
+               // final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
+               // fConnector.createMessageStreams(topicCountMap);
+               // final List<KafkaStream<byte[], byte[]>> streams =
+               // consumerMap.get(fTopic);
+
+               kConsumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(prop);
+               // System.out.println("I am in Consumer APP " + topic + "-- " +
+               // fConsumer);
+               kConsumer.subscribe(Arrays.asList(topic));
+               log.info(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+       System.out.println("-----id " +id);
+       
+               
+         try { ConsumerRecords<String, String> records =
+                                                  kConsumer.poll(500); System.out.println("---" +
+                                                   records.count());
+                                         
+                                                   for (ConsumerRecord<String, String> record : records) {
+                                                   System.out.printf("offset = %d, key = %s, value = %s",
+                                                   record.offset(), record.key(), record.value()); String t =
+                                                   record.value();
+                                         
+                                                   }
+                 }catch(Exception e){
+                         System.out.println( e);
+                 }
+                       System.out.println(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+                       kConsumer.commitSync();
+               //  fConsumer.close();  
+       
+
+               /*
+                * ConsumerRecords<String, String> records = fConsumer.poll(500);
+                * System.out.println("---" + records.count());
+                * 
+                * for (ConsumerRecord<String, String> record : records) {
+                * System.out.printf("offset = %d, key = %s, value = %s",
+                * record.offset(), record.key(), record.value()); String t =
+                * record.value();
+                * 
+                * }
+                * 
+                * 
+                * fConsumer.commitSync(); fConsumer.close();
+                */
+
+               // fStream = streams.iterator().next();
+       }
+       
+       
+       
+       private Consumer.Message makeMessage ( final ConsumerRecord<String,String> msg )
+       {
+               return new Consumer.Message()
+               {
+                       @Override
+                       public long getOffset ()
+                       {
+                               return msg.offset ();
+                       }
+                       
+                       @Override
+                       public String getMessage ()
+                       {
+                               return new String ( msg.value () );
+                       }
+               };
+       }
+       
+       @Override
+       public synchronized Consumer.Message nextMessage ()
+       {
+               
+               try
+               {
+                       if ( fPendingMsgs.size () > 0 )
+                       {
+                               return makeMessage ( fPendingMsgs.take () );
+                       }
+               }
+               catch ( InterruptedException x )
+               {
+                       log.warn ( "After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage () + ")", x );
+               }
+               
+               
+                       try
+                       {
+                               boolean foundMsgs = false;
+                               System.out.println("entering into pollingWWWWWWWWWWWWWWWWW");
+                               final ConsumerRecords<String,String> records = kConsumer.poll ( 100 );
+                               System.out.println("polling doneXXXXXXXXXXXXXXXXXXXXXXXXXXX....");
+                               for ( ConsumerRecord<String,String> record : records )
+                               {
+                                       foundMsgs = true;
+                                       fPendingMsgs.offer ( record );
+                               }
+                       
+                       }
+                       catch ( KafkaException x )
+                       {
+                               log.debug ( fLogTag + ": KafkaException " + x.getMessage () );
+                               
+                       }
+                       catch ( java.lang.IllegalStateException | java.lang.IllegalArgumentException x )
+                       {
+                               log.error ( fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + x.getMessage () );
+                       
+                       }
+                               
+               return null;
+       }
+       
+       
+
+       /**
+        * getName() method returns string type value. returns 3 parameters in
+        * string:- fTopic,fGroup,fId
+        * 
+        * @Override
+        */
+       public String getName() {
+               return fTopic + " : " + fGroup + " : " + fId;
+       }
+
+       /**
+        * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
+        * variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getCreateTimeMs() {
+               return fCreateTimeMs;
+       }
+
+       public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() {
+               return kConsumer;
+       }
+
+       /**
+        * getLastAccessMs() method returns long type value. returns fLastTouch
+        * variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getLastAccessMs() {
+               return fLastTouch;
+       }
+
+       
+
+       /**
+        * getOffset() method returns long type value. returns offset variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getOffset() {
+               return offset;
+       }
+
+       /**
+        * commit offsets commitOffsets() method will be called on closed of
+        * KafkaConsumer.
+        * 
+        * @Override
+        * 
+        *
+        *                      public void commitOffsets() { if (getState() ==
+        *           KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
+        *           on closed KafkaConsumer " + getName()); return; }
+        *           fConnector.commitOffsets(); }
+        */
+
+       /**
+        * updating fLastTouch with current time in ms
+        */
+       public void touch() {
+               fLastTouch = System.currentTimeMillis();
+       }
+
+       /**
+        * getLastTouch() method returns long type value. returns fLastTouch
+        * variable value
+        * 
+        */
+       public long getLastTouch() {
+               return fLastTouch;
+       }
+
+       /**
+        * setting the kafkaConsumer state to closed
+        */
+       public synchronized boolean close() {
+
+               if (getState() == KafkaConsumer.State.CLOSED) {
+
+                       log.warn("close() called on closed KafkaConsumer " + getName());
+                       return true;
+               }
+
+               setState(KafkaConsumer.State.CLOSED);
+               // fConnector.shutdown();
+               boolean retVal = kafkaConnectorshuttask();
+               return retVal;
+
+       }
+
+       /* time out if the kafka shutdown fails for some reason */
+
+       private boolean kafkaConnectorshuttask() {
+               Callable<Boolean> run = new Callable<Boolean>() {
+                       @Override
+                       public Boolean call() throws Exception {
+                               // your code to be timed
+                               try {
+                               System.out.println("consumer closing....." + kConsumer);
+                                       kConsumer.close();
+                               } catch (Exception e) {
+                                       log.info("@@@@@@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
+                               }
+                               log.info("Kafka connection closure with in 15 seconds by a Executors task");
+                               return true;
+                       }
+               };
+
+               RunnableFuture future = new FutureTask(run);
+               ExecutorService service = Executors.newSingleThreadExecutor();
+               service.execute(future);
+               Boolean result = null;
+               try {
+                       result = (Boolean) future.get(15, TimeUnit.SECONDS); // wait 1
+                                                                                                                                       // second
+               } catch (TimeoutException ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task");
+                       future.cancel(true);
+               } catch (Exception ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task" + ex);
+                       future.cancel(true);
+                       return false;
+               }
+               service.shutdown();
+               return true;
+       }
+
+       /**
+        * getConsumerGroup() returns Consumer group
+        * 
+        * @return
+        */
+       public String getConsumerGroup() {
+               return fGroup;
+       }
+
+       /**
+        * getConsumerId returns Consumer Id
+        * 
+        * @return
+        */
+       public String getConsumerId() {
+               return fId;
+       }
+
+       /**
+        * getState returns kafkaconsumer state
+        * 
+        * @return
+        */
+       private KafkaConsumer.State getState() {
+               return this.state;
+       }
+
+       /**
+        * setState() sets the kafkaConsumer state
+        * 
+        * @param state
+        */
+       private void setState(KafkaConsumer.State state) {
+               this.state = state;
+       }
+
+       // private ConsumerConnector fConnector;
+       private final String fTopic;
+       private final String fGroup;
+       private final String fId;
+       private final String fLogTag;
+       // private final KafkaStream<byte[], byte[]> fStream;
+       private final org.apache.kafka.clients.consumer.KafkaConsumer<String, String> kConsumer;
+       private long fCreateTimeMs;
+       private long fLastTouch;
+       private long offset;
+       private KafkaConsumer.State state;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
+       private final LinkedBlockingQueue<ConsumerRecord<String,String>> fPendingMsgs;
+       // private static final Logger log =
+       // LoggerFactory.getLogger(KafkaConsumer.class);
+
+       @Override
+       public void commitOffsets() {
+               if (getState() == KafkaConsumer.State.CLOSED) {
+                       log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+                       return;
+               }
+               kConsumer.commitSync();
+               // fConsumer.close();
+
+       }
+
+
+
+       @Override
+       public void setOffset(long offsetval) {
+               // TODO Auto-generated method stub
+               offset = offsetval;
+       }
+}
@@ -8,21 +8,24 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.kafka;
+package com.att.dmf.mr.backends.kafka;
 
 import java.io.IOException;
+import java.net.InetAddress;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Enumeration;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map.Entry;
@@ -31,6 +34,8 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
+import javax.annotation.Resource;
+
 import org.I0Itec.zkclient.exception.ZkException;
 import org.I0Itec.zkclient.exception.ZkInterruptedException;
 import org.apache.curator.framework.CuratorFramework;
@@ -46,26 +51,40 @@ import org.apache.curator.utils.ZKPaths;
 import org.apache.http.annotation.NotThreadSafe;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.ComponentScan;
 
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.utils.ConfigurationReader;
+//import org.slf4j.Logger;
 //import org.slf4j.LoggerFactory;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.CdmTimer;
 
 /**
  * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
  *                must be
- * @author author
+ * @author peter
  *
  */
 @NotThreadSafe
 public class KafkaConsumerCache {
 
+       private static KafkaConsumerCache kafkaconscache = null;
+
+       public static KafkaConsumerCache getInstance() {
+               if (kafkaconscache == null)
+                       kafkaconscache = new KafkaConsumerCache();
+
+               return kafkaconscache;
+       }
+
        private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
        private static final int kDefault_ConsumerHandoverWaitMs = 500;
 
@@ -82,7 +101,7 @@ public class KafkaConsumerCache {
        // the server at least every 30 seconds, timing out after 2 minutes should
        // be okay.
        // FIXME: consider allowing the client to specify its expected call rate?
-       private static final long kDefault_MustTouchEveryMs = (long)1000 * 60 * 2;
+       private static final long kDefault_MustTouchEveryMs = 1000 * 60 * 2;
 
        // check for expirations pretty regularly
        private static final long kDefault_SweepEverySeconds = 15;
@@ -91,10 +110,20 @@ public class KafkaConsumerCache {
                NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
        }
 
+       // @Qualifier("kafkalockavoid")
+
+       // @Resource
+       // @Qualifier("kafkalockavoid")
+       // KafkaLiveLockAvoider2 kafkaLiveLockAvoider;
+
+       @Autowired
+       private DMaaPErrorMessages errorMessages;
+
+       // KafkaLiveLockAvoider kafkaLiveLockAvoider = new KafkaLiveLockAvoider();
        /**
         * User defined exception class for kafka consumer cache
         * 
-        * @author author
+        * @author nilanjana.maity
         *
         */
        public class KafkaConsumerCacheException extends Exception {
@@ -126,29 +155,27 @@ public class KafkaConsumerCache {
         * @param s
         * @param metrics
         */
-       public KafkaConsumerCache(String apiId,  MetricsSet metrics) {
-
-               if (apiId == null) {
-                       throw new IllegalArgumentException("API Node ID must be specified.");
-               }
+       public KafkaConsumerCache() {
 
-               fApiId = apiId;
-       //      fSettings = s;
-               fMetrics = metrics;
-               String strkSetting_ZkBasePath= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ZkBasePath);
-               if(null==strkSetting_ZkBasePath)strkSetting_ZkBasePath = kDefault_ZkBasePath;
+               String strkSetting_ZkBasePath = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               kSetting_ZkBasePath);
+               if (null == strkSetting_ZkBasePath)
+                       strkSetting_ZkBasePath = kDefault_ZkBasePath;
                fBaseZkPath = strkSetting_ZkBasePath;
 
-               fConsumers = new ConcurrentHashMap<String, KafkaConsumer>();
+               fConsumers = new ConcurrentHashMap<String, Kafka011Consumer>();
                fSweepScheduler = Executors.newScheduledThreadPool(1);
 
                curatorConsumerCache = null;
 
                status = Status.NOT_STARTED;
+               // Watcher for consumer rebalancing across nodes. Kafka011 rebalancing
+               // work around
 
                listener = new ConnectionStateListener() {
                        public void stateChanged(CuratorFramework client, ConnectionState newState) {
                                if (newState == ConnectionState.LOST) {
+
                                        log.info("ZooKeeper connection expired");
                                        handleConnectionLoss();
                                } else if (newState == ConnectionState.READ_ONLY) {
@@ -175,33 +202,32 @@ public class KafkaConsumerCache {
         * @throws KafkaConsumerCacheException
         */
        public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
-               try {
 
-                       // CuratorFramework curator = null;
+               if (fApiId == null) {
+                       throw new IllegalArgumentException("API Node ID must be specified.");
+               }
+
+               try {
 
-                       // Changed the class from where we are initializing the curator
-                       // framework
-                       if (mode != null && mode.equals(CambriaConstants.CAMBRIA)) {
-                               curator = ConfigurationReader.getCurator();
-                       } else if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
+                       if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
                                curator = getCuratorFramework(curator);
                        }
-
                        curator.getConnectionStateListenable().addListener(listener);
-
                        setStatus(Status.CONNECTED);
-
                        curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
                        curatorConsumerCache.start();
-
                        curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
                                public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
                                        switch (event.getType()) {
                                        case CHILD_ADDED: {
-                                               final String apiId = new String(event.getData().getData());
-                                               final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+                                               try {
+                                                       final String apiId = new String(event.getData().getData());
+                                                       final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
 
-                                               log.info(apiId + " started consumer " + consumer);
+                                                       log.info(apiId + " started consumer " + consumer);
+                                               } catch (Exception ex) {
+                                                       log.info("#Error Occured during Adding child" + ex);
+                                               }
                                                break;
                                        }
                                        case CHILD_UPDATED: {
@@ -209,8 +235,11 @@ public class KafkaConsumerCache {
                                                final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
 
                                                if (fConsumers.containsKey(consumer)) {
-                                                       log.info(apiId + " claimed consumer " + consumer + " from " + fApiId);
-
+                                                       log.info(apiId + " claimed consumer " + consumer + " from " + fApiId
+                                                                       + " but wont hand over");
+                                                       // Commented so that it dont give the connection
+                                                       // until the active node is running for this client
+                                                       // id.
                                                        dropClaimedConsumer(consumer);
                                                }
 
@@ -220,12 +249,14 @@ public class KafkaConsumerCache {
                                                final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
 
                                                if (fConsumers.containsKey(consumer)) {
-                                                       log.info("Someone wanted consumer " + consumer + " gone;  removing it from the cache");
+                                                       log.info("Someone wanted consumer " + consumer
+                                                                       + " gone;  but not removing it from the cache");
                                                        dropConsumer(consumer, false);
                                                }
 
                                                break;
                                        }
+
                                        default:
                                                break;
                                        }
@@ -236,21 +267,23 @@ public class KafkaConsumerCache {
                        EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
                        ensurePath.ensure(curator.getZookeeperClient());
 
-                       //final long freq = fSettings.getLong(kSetting_SweepEverySeconds, kDefault_SweepEverySeconds);
+                       // final long freq = fSettings.getLong(kSetting_SweepEverySeconds,
+                       // kDefault_SweepEverySeconds);
                        long freq = kDefault_SweepEverySeconds;
-                       String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_SweepEverySeconds);
-                       if(null==strkSetting_SweepEverySeconds) {
-                               strkSetting_SweepEverySeconds = kDefault_SweepEverySeconds+"";
+                       String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       kSetting_SweepEverySeconds);
+                       if (null != strkSetting_SweepEverySeconds) {
+                               freq = Long.parseLong(strkSetting_SweepEverySeconds);
                        }
-                       
-                         freq = Long.parseLong(strkSetting_SweepEverySeconds);
-                                       
+
                        fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
                        log.info("KafkaConsumerCache started");
                        log.info("sweeping cached clients every " + freq + " seconds");
                } catch (ZkException e) {
+                       log.error("@@@@@@ ZK Exception occured for  " + e);
                        throw new KafkaConsumerCacheException(e);
                } catch (Exception e) {
+                       log.error("@@@@@@  Exception occured for  " + e);
                        throw new KafkaConsumerCacheException(e);
                }
        }
@@ -270,7 +303,6 @@ public class KafkaConsumerCache {
                        } catch (InterruptedException e) {
                                // Ignore
                                log.error("error while setting curator framework :" + e.getMessage());
-                               Thread.currentThread().interrupt();
                        }
                }
 
@@ -291,9 +323,9 @@ public class KafkaConsumerCache {
                                curatorConsumerCache.close();
                                log.info("Curator client closed");
                        } catch (ZkInterruptedException e) {
-                               log.error("Curator client close interrupted: " + e);
+                               log.warn("Curator client close interrupted: " + e.getMessage());
                        } catch (IOException e) {
-                               log.error("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e);
+                               log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage());
                        }
 
                        curatorConsumerCache = null;
@@ -325,13 +357,13 @@ public class KafkaConsumerCache {
         * @param clientId
         * @return a consumer, or null
         */
-       public KafkaConsumer getConsumerFor(String topic, String consumerGroupId, String clientId)
+       public Kafka011Consumer getConsumerFor(String topic, String consumerGroupId, String clientId)
                        throws KafkaConsumerCacheException {
                if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
                        throw new KafkaConsumerCacheException("The cache service is unavailable.");
 
                final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
-               final KafkaConsumer kc = fConsumers.get(consumerKey);
+               final Kafka011Consumer kc = fConsumers.get(consumerKey);
 
                if (kc != null) {
                        log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
@@ -345,6 +377,66 @@ public class KafkaConsumerCache {
                return kc;
        }
 
+       /**
+        * Get a cached consumer by topic, group, and id, if it exists (and remains
+        * valid) In addition, this method waits for all other consumer caches in
+        * the cluster to release their ownership and delete their version of this
+        * consumer.
+        * 
+        * @param topic
+        * @param consumerGroupId
+        * @param clientId
+        * @return a consumer, or null
+        */
+       public ArrayList<Kafka011Consumer> getConsumerListForCG(String topicgroup, String clientId)
+                       throws KafkaConsumerCacheException {
+               if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+                       throw new KafkaConsumerCacheException("The cache service is unavailable.");
+               ArrayList<Kafka011Consumer> kcl = new ArrayList<Kafka011Consumer>();
+               // final String consumerKey = makeConsumerKey(topic, consumerGroupId,
+               // clientId);
+               Enumeration<String> strEnum = fConsumers.keys();
+               String consumerLocalKey = null;
+               while (strEnum.hasMoreElements()) {
+                       consumerLocalKey = strEnum.nextElement();
+
+                       if (consumerLocalKey.startsWith(topicgroup) && (!consumerLocalKey.endsWith("::" + clientId))) {
+
+                               // System.out.println("consumer key returning from
+                               // getConsumerListforCG +++++++++ " + consumerLocalKey
+                               // + " " + fConsumers.get(consumerLocalKey));
+                               kcl.add(fConsumers.get(consumerLocalKey));
+
+                       }
+               }
+
+               return kcl;
+       }
+
+       public ArrayList<Kafka011Consumer> getConsumerListForCG(String group) throws KafkaConsumerCacheException {
+               if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+                       throw new KafkaConsumerCacheException("The cache service is unavailable.");
+               ArrayList<Kafka011Consumer> kcl = new ArrayList<Kafka011Consumer>();
+               // final String consumerKey = makeConsumerKey(topic, consumerGroupId,
+               // clientId);
+               Enumeration<String> strEnum = fConsumers.keys();
+               String consumerLocalKey = null;
+               while (strEnum.hasMoreElements()) {
+                       consumerLocalKey = strEnum.nextElement();
+
+                       if (consumerLocalKey.startsWith(group)) {
+
+                               // System.out.println("consumer key returning from
+                               // getConsumerListforCG +++++++++ " + consumerLocalKey
+                               // + " " + fConsumers.get(consumerLocalKey));
+                               kcl.add(fConsumers.get(consumerLocalKey));
+
+                       }
+               }
+
+               return kcl;
+       }
+
        /**
         * Put a consumer into the cache by topic, group and ID
         * 
@@ -354,24 +446,28 @@ public class KafkaConsumerCache {
         * @param consumer
         * @throws KafkaConsumerCacheException
         */
-       public void putConsumerFor(String topic, String consumerGroupId, String consumerId, KafkaConsumer consumer)
+       public void putConsumerFor(String topic, String consumerGroupId, String consumerId, Kafka011Consumer consumer)
                        throws KafkaConsumerCacheException {
                if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
                        throw new KafkaConsumerCacheException("The cache service is unavailable.");
 
                final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
                fConsumers.put(consumerKey, consumer);
+
+               // String appId = "node-instance-"+i;
+
+               log.info("^@ Consumer Added to Cache Consumer Key" + consumerKey + " ApiId" + fApiId);
        }
 
        public Collection<? extends Consumer> getConsumers() {
-               return new LinkedList<KafkaConsumer>(fConsumers.values());
+               return new LinkedList<Kafka011Consumer>(fConsumers.values());
        }
 
        /**
         * This method is to drop all the consumer
         */
        public void dropAllConsumers() {
-               for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
+               for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
                        dropConsumer(entry.getKey(), true);
                }
 
@@ -396,18 +492,33 @@ public class KafkaConsumerCache {
                }
 
                // First, drop this consumer from our cache
-               dropConsumer(key, true);
-
+               boolean isdrop = dropConsumer(key, true);
+               if (!isdrop) {
+                       return;
+               }
                final CuratorFramework curator = ConfigurationReader.getCurator();
 
                try {
                        curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
+                       log.info(" ^ deleted " + fBaseZkPath + "/" + key);
                } catch (NoNodeException e) {
-                       log.error("Exception at : " + e);
+                       log.warn("A consumer was deleted from " + fApiId
+                                       + "'s cache, but no Cambria API node had ownership of it in ZooKeeper");
                } catch (Exception e) {
-                       log.error("Unexpected exception while deleting consumer: " + e);
+                       log.debug("Unexpected exception while deleting consumer: " + e.getMessage());
+                       log.info(" %%%%%%@# Unexpected exception while deleting consumer: " + e.getMessage());
                }
 
+               try {
+                       int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+                       String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       kSetting_ConsumerHandoverWaitMs);
+                       if (strkSetting_ConsumerHandoverWaitMs != null)
+                               consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+                       Thread.sleep(consumerHandoverWaitMs);
+               } catch (InterruptedException e) {
+                       // Ignore
+               }
                log.info("Dropped " + key + " consumer due to timeout");
        }
 
@@ -423,7 +534,7 @@ public class KafkaConsumerCache {
                        fMetrics.onKafkaConsumerClaimed();
                        log.info("Consumer [" + key + "] claimed by another node.");
                }
-
+               log.info("^dropping claimed Kafka consumer " + key);
                dropConsumer(key, false);
        }
 
@@ -434,23 +545,38 @@ public class KafkaConsumerCache {
         * @param key
         * @param dueToTimeout
         */
-       private void dropConsumer(String key, boolean dueToTimeout) {
-               final KafkaConsumer kc = fConsumers.remove(key);
-
+       private boolean dropConsumer(String key, boolean dueToTimeout) {
+               final Kafka011Consumer kc = fConsumers.get(key);
+               log.info("closing Kafka consumer " + key + " object " + kc);
                if (kc != null) {
-                       log.info("closing Kafka consumer " + key);
-                       kc.close();
+                       // log.info("closing Kafka consumer " + key);
+                       if (kc.close()) {
+                               fConsumers.remove(key);
+
+                       } else {
+                               return false;
+                       }
                }
+               return true;
        }
 
-//     private final rrNvReadable fSettings;
-       private final MetricsSet fMetrics;
+       // private final rrNvReadable fSettings;
+       private MetricsSet fMetrics;
        private final String fBaseZkPath;
        private final ScheduledExecutorService fSweepScheduler;
-       private final String fApiId;
+       private String fApiId;
+
+       public void setfMetrics(final MetricsSet metrics) {
+               this.fMetrics = metrics;
+       }
+
+       public void setfApiId(final String id) {
+               this.fApiId = id;
+       }
+
        private final ConnectionStateListener listener;
 
-       private ConcurrentHashMap<String, KafkaConsumer> fConsumers;
+       private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;
        private PathChildrenCache curatorConsumerCache;
 
        private volatile Status status;
@@ -458,18 +584,19 @@ public class KafkaConsumerCache {
        private void handleReconnection() {
 
                log.info("Reading current cache data from ZK and synchronizing local cache");
-
                final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
-
                // Remove all the consumers in this API nodes cache that now belong to
                // other API nodes.
                for (ChildData cachedConsumer : cacheData) {
                        final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
                        final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
                                        : "undefined";
-
                        if (!fApiId.equals(owningApiId)) {
-                               fConsumers.remove(consumerId);
+                               fConsumers.remove(consumerId); // Commented to avoid removing
+                               // the value cache hashmap but the lock still exists.
+                               // This is not considered in kafka consumer Factory
+                               log.info("@@@ Validating current cache data from ZK and synchronizing local cache" + owningApiId
+                                               + " removing " + consumerId);
                        }
                }
 
@@ -492,8 +619,12 @@ public class KafkaConsumerCache {
        }
 
        private void closeAllCachedConsumers() {
-               for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
-                       entry.getValue().close();
+               for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
+                       try {
+                               entry.getValue().close();
+                       } catch (Exception e) {
+                               log.info("@@@@@@ Error occurd while closing Clearing All cache " + e);
+                       }
                }
        }
 
@@ -513,22 +644,20 @@ public class KafkaConsumerCache {
                        throws KafkaConsumerCacheException {
                // get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
                final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+               final CdmTimer timer = new CdmTimer(fMetrics, "CacheSignalOwnership");
 
                try {
                        final String consumerPath = fBaseZkPath + "/" + consumerKey;
-
                        log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
-
                        final CuratorFramework curator = ConfigurationReader.getCurator();
 
                        try {
                                curator.setData().forPath(consumerPath, fApiId.getBytes());
                        } catch (KeeperException.NoNodeException e) {
-                               log.error(e.toString());
                                curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
                        }
-
                        log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
+                       timer.end();
                } catch (Exception e) {
                        log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
                        throw new KafkaConsumerCacheException(e);
@@ -537,36 +666,37 @@ public class KafkaConsumerCache {
                log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
 
                try {
-                       int kSetting_ConsumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
-                       String strkSetting_ConsumerHandoverWaitMs= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ConsumerHandoverWaitMs+"");
-                       if(strkSetting_ConsumerHandoverWaitMs!=null) {
-                               kSetting_ConsumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
-                       }
-                                       Thread.sleep(kSetting_ConsumerHandoverWaitMs);
-                       //Thread.sleep(fSettings.getInt(kSetting_ConsumerHandoverWaitMs, kDefault_ConsumerHandoverWaitMs));
+                       int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+                       String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       kSetting_ConsumerHandoverWaitMs);
+                       if (strkSetting_ConsumerHandoverWaitMs != null)
+                               consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+                       Thread.sleep(consumerHandoverWaitMs);
                } catch (InterruptedException e) {
-                       log.error(e.toString());
-                       Thread.currentThread().interrupt();
+                       // Ignore
                }
        }
 
-       private void sweep() {
+       public KafkaLiveLockAvoider2 getkafkaLiveLockAvoiderObj() {
+               return null;
+       }
+
+       public void sweep() {
                final LinkedList<String> removals = new LinkedList<String>();
                long mustTouchEveryMs = kDefault_MustTouchEveryMs;
-               String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_TouchEveryMs);
-               //if(null!=strkSetting_TouchEveryMs) strkSetting_TouchEveryMs = kDefault_MustTouchEveryMs+"";
-               if(null!=strkSetting_TouchEveryMs)
-               {
-                 mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);  
+               String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               kSetting_TouchEveryMs);
+               if (null != strkSetting_TouchEveryMs) {
+                       mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);
                }
 
-               //final long mustTouchEveryMs = fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
+               // final long mustTouchEveryMs =
+               // fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
                final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
 
-               for (Entry<String, KafkaConsumer> e : fConsumers.entrySet()) {
+               for (Entry<String, Kafka011Consumer> e : fConsumers.entrySet()) {
                        final long lastTouchMs = e.getValue().getLastTouch();
-
-                       log.debug("consumer " + e.getKey() + " last touched at " + lastTouchMs);
+                       log.debug("consumer #####1" + e.getKey() + "    " + lastTouchMs + " < " + oldestAllowedTouchMs);
 
                        if (lastTouchMs < oldestAllowedTouchMs) {
                                log.info("consumer " + e.getKey() + " has expired");
@@ -582,7 +712,7 @@ public class KafkaConsumerCache {
        /**
         * Creating a thread to run the sweep method
         * 
-        * @author author
+        * @author nilanjana.maity
         *
         */
        private class sweeper implements Runnable {
@@ -614,5 +744,6 @@ public class KafkaConsumerCache {
        }
 
        private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
-       //private static final Logger log = LoggerFactory.getLogger(KafkaConsumerCache.class);
+       // private static final Logger log =
+       // LoggerFactory.getLogger(KafkaConsumerCache.class);
 }
\ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java
new file mode 100644 (file)
index 0000000..805701a
--- /dev/null
@@ -0,0 +1,161 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import javax.annotation.PostConstruct;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.Watcher;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.stereotype.Component;
+import org.springframework.stereotype.Service;
+
+//@ComponentScan(basePackages="com.att.dmf.mr.backends.kafka")
+@Component
+public class KafkaLiveLockAvoider2 {
+       
+       public static final String ZNODE_ROOT = "/live-lock-avoid";
+       public static final String ZNODE_LOCKS = "/locks";
+       public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks";
+       
+       private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS;
+       private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaLiveLockAvoider2.class.getName());
+       
+       @Autowired
+       @Qualifier("curator")   
+       private CuratorFramework curatorFramework;
+       
+   @PostConstruct
+       public void init() {
+       System.out.println("Welcome......................................................................................");
+       try {
+               if (curatorFramework.checkExists().forPath(locksPath) == null) {
+                       curatorFramework.create().creatingParentsIfNeeded().forPath(locksPath);
+               }
+               if (curatorFramework.checkExists().forPath(tasksPath) == null) {
+                       curatorFramework.create().creatingParentsIfNeeded().forPath(tasksPath);
+               }
+               
+       } catch (Exception e) {
+               //e.printStackTrace();
+               log.error("Error during creation of permanent Znodes under /live-lock-avoid ",e);
+               
+       }
+       
+               
+       }
+       public void unlockConsumerGroup(String appId, String groupName) throws Exception {
+               
+               log.info("Signalling unlock to all conumsers of in group [{}] now, " ,  groupName);
+               
+               String fullLockPath = String.format("%s/%s", locksPath, groupName );
+               String fullTasksPath = null;
+               
+               try {
+
+                       //Use the Curator recipe for a Mutex lock, only one process can be broadcasting unlock instructions for a group
+                       InterProcessMutex lock = new InterProcessMutex(curatorFramework, fullLockPath);
+                       if ( lock.acquire(100L, TimeUnit.MILLISECONDS) ) 
+                       {
+                               try 
+                               {
+                                       List<String> taskNodes = curatorFramework.getChildren().forPath(tasksPath);
+                                       for (String taskNodeName : taskNodes) {
+                                               if(!taskNodeName.equals(appId)) {
+                                                       
+                                                       fullTasksPath = String.format("%s/%s/%s", tasksPath, taskNodeName, groupName);
+                                                       log.info("Writing groupName {} to path {}",groupName, fullTasksPath);
+                                                       
+                                                       
+                                                       if(curatorFramework.checkExists().forPath(fullTasksPath) != null) {
+                                                               curatorFramework.delete().forPath(fullTasksPath);
+                                                       }
+                                                       curatorFramework.create().withMode(CreateMode.EPHEMERAL).forPath(fullTasksPath);
+                                               }
+                                       }
+                                       
+
+                               }
+                               finally
+                               {
+                                       //Curator lock recipe requires a acquire() to be followed by a release()
+                                       lock.release();
+                               }
+                       }else {
+                               log.info("Could not obtain the avoider lock, another process has the avoider lock? {}", !lock.isAcquiredInThisProcess() );
+                       }
+
+
+               } catch (Exception e) {
+                       log.error("Error setting up either lock ZNode {} or task  ZNode {}",fullLockPath, fullTasksPath,e);
+                       throw e;
+               }
+               
+               
+       }
+       
+       /*
+        * Shoud be called once per MR server instance.
+        * 
+        */
+       public void startNewWatcherForServer(String appId, LiveLockAvoidance avoidanceCallback) {
+               LockInstructionWatcher instructionWatcher = new LockInstructionWatcher(curatorFramework,avoidanceCallback,this);
+               assignNewProcessNode(appId, instructionWatcher);
+               
+       }
+       
+       
+       protected void assignNewProcessNode(String appId, Watcher processNodeWatcher ) {
+               
+               String taskHolderZnodePath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS+"/"+appId;
+               //Watcher processNodeWatcher = createWatcher();
+               
+               try {
+                       
+                       if(curatorFramework.checkExists().forPath(taskHolderZnodePath) != null) {
+                               curatorFramework.delete().deletingChildrenIfNeeded().forPath(taskHolderZnodePath);
+
+                       }
+                       curatorFramework.create().forPath(taskHolderZnodePath);
+                       //setup the watcher
+                       curatorFramework.getChildren().usingWatcher(processNodeWatcher).inBackground().forPath(taskHolderZnodePath);
+                       log.info("Done creating task holder and watcher for APP name: {}",appId);
+                       
+               } catch (Exception e) {
+                       log.error("Could not add new processing node for name {}", appId, e);
+               }
+                               
+       }
+
+       
+}
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.kafka;
+package com.att.dmf.mr.backends.kafka;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -27,28 +27,32 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Properties;
 
-import kafka.common.FailedToSendMessageException;
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
 import org.json.JSONException;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.constants.CambriaConstants;
 //import org.slf4j.Logger;
 //import org.slf4j.LoggerFactory;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.constants.CambriaConstants;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
 
+//import kafka.FailedToSendMessageException;
+//import kafka.javaapi.producer.Producer;
+//import kafka.producer.KeyedMessage;
+//import kafka.producer.ProducerConfig;
+//import kafka.producer.KeyedMessage;
+
 /**
  * Sends raw JSON objects into Kafka.
  * 
  * Could improve space: BSON rather than JSON?
  * 
- * @author author
+ * @author peter
  *
  */
 
@@ -67,21 +71,32 @@ public class KafkaPublisher implements Publisher {
                transferSetting(fSettings, props, "request.required.acks", "1");
                transferSetting(fSettings, props, "message.send.max.retries", "5");
                transferSetting(fSettings, props, "retry.backoff.ms", "150"); */
-               String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list"); 
-               System.out.println("kafkaConnUrl:- "+kafkaConnUrl);
-               if(null==kafkaConnUrl){ 
-                       kafkaConnUrl="localhost:9092"; 
-               }               
-               transferSetting( props, "metadata.broker.list", kafkaConnUrl);
+               String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list");
+               if(null==kafkaConnUrl){
+                       
+                       kafkaConnUrl="localhost:9092";
+               }
+               //String jaaspath="C:/ATT/Apps/dmaapCodedmaap-framework/dmaap/bundleconfig-local/etc/appprops/kafka_pub_jaas.conf";
+        //     props.put("bootstrap.servers", bootSever);
+       //System.setProperty("java.security.auth.login.config",jaaspath);
+       
+               /*transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+               transferSetting( props, "security.protocol", "SASL_PLAINTEXT");
+               transferSetting( props, "sasl.mechanism", "PLAIN");*/
+               transferSetting( props, "bootstrap.servers",kafkaConnUrl);
+                       //transferSetting( props, "metadata.broker.list", kafkaConnUrl);
                transferSetting( props, "request.required.acks", "1");
                transferSetting( props, "message.send.max.retries", "5");
                transferSetting(props, "retry.backoff.ms", "150"); 
 
-               props.put("serializer.class", "kafka.serializer.StringEncoder");
+               //props.put("serializer.class", "kafka.serializer.StringEncoder");
+               
+               props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+                props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
 
-               fConfig = new ProducerConfig(props);
-               fProducer = new Producer<String, String>(fConfig);
+               //fConfig = new ProducerConfig(props);
+               //fProducer = new Producer<String, String>(fConfig);
+               fProducer = new KafkaProducer<>(props);
        }
 
        /**
@@ -92,19 +107,19 @@ public class KafkaPublisher implements Publisher {
         * @throws JSONException
         */
        @Override
-       public void sendMessage(String topic, message msg) throws IOException, FailedToSendMessageException {
+       public void sendMessage(String topic, message msg) throws IOException{
                final List<message> msgs = new LinkedList<message>();
                msgs.add(msg);
                sendMessages(topic, msgs);
        }
 
-       /**
+       /**  
         * method publishing batch messages
-        * 
+       * This method is commented from 0.8 to 0.11 upgrade
         * @param topic
         * @param kms
         * throws IOException
-        */
+        *
        public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
                try {
                        fProducer.send(kms);
@@ -114,8 +129,26 @@ public class KafkaPublisher implements Publisher {
                        throw new FailedToSendMessageException(excp.getMessage(), excp);
                }
 
-       }
+       } */
+
 
+       /*
+        * Kafka 11.0 Interface
+        * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
+        */
+       public void sendBatchMessageNew(String topic, ArrayList <ProducerRecord<String,String>> kms) throws IOException {
+               try {
+                       for (ProducerRecord<String,String> km : kms) {
+                               fProducer.send(km);
+                       }
+
+               } catch (Exception excp) { 
+                       log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+                       throw new IOException(excp.getMessage(), excp);
+               }
+
+       }
+       
        /**
         * Send a set of messages. Each must have a "key" string value.
         * 
@@ -123,7 +156,7 @@ public class KafkaPublisher implements Publisher {
         * @param msg
         * @throws FailedToSendMessageException
         * @throws JSONException
-        */
+        *
        @Override
        public void sendMessages(String topic, List<? extends message> msgs)
                        throws IOException, FailedToSendMessageException {
@@ -141,11 +174,33 @@ public class KafkaPublisher implements Publisher {
                        log.error("Failed to send message(s) to topic [" + topic + "].", excp);
                        throw new FailedToSendMessageException(excp.getMessage(), excp);
                }
-       }
+       } */
+       @Override
+       public void sendMessagesNew(String topic, List<? extends message> msgs)
+                       throws IOException {
+               log.info("sending " + msgs.size() + " events to [" + topic + "]");
+try{
+               final List<ProducerRecord<String, String>> kms = new ArrayList<ProducerRecord<String, String>>(msgs.size());
+                       for (message o : msgs) {
+                       
+                       final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, o.getKey(), o.toString());
+                       //kms.add(data);
+               
+               try {
+
+                       fProducer.send(data);
 
+               } catch (Exception excp) {
+                       log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+                       throw new Exception(excp.getMessage(), excp);
+               }
+       }
+               
+       }catch(Exception e){}
+}
        //private final rrNvReadable fSettings;
 
-       private ProducerConfig fConfig;
+       //private ProducerConfig fConfig;
        private Producer<String, String> fProducer;
 
   /**
@@ -165,4 +220,16 @@ public class KafkaPublisher implements Publisher {
        //private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);
 
        private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
-}
+
+       @Override
+       public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+               // TODO Auto-generated method stub
+               
+       }
+
+       //@Override
+       //public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+               // TODO Auto-generated method stub
+               
+       //}
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java b/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java
new file mode 100644 (file)
index 0000000..a13ecea
--- /dev/null
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+
+
+/**
+ * Live Lock Avoidance interface.  To be implemented by the main message router client
+ *
+ */
+public interface LiveLockAvoidance {
+       
+       /**
+        * Gets the unique id
+        * @return the unique id for the Message Router server instance
+        */
+       String getAppId();
+       
+       
+       /**
+        * Main callback to inform the local MR server instance that all consumers in a group need to soft poll
+        * @param groupName name of the Kafka consumer group needed a soft poll
+        */
+       void handleRebalanceUnlock( String groupName);
+
+}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java b/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java
new file mode 100644 (file)
index 0000000..5d3bc47
--- /dev/null
@@ -0,0 +1,100 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.List;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * 
+ * LockInstructionWatcher
+ * A package-private class used internally by the KafkaLiveLockAvoider.  
+ * 
+ * This class implements the zookeeper Watcher callback and listens for changes on child nodes changing.
+ * Each child node is actually a Kafka group name that needs to be soft polled.  Deletion of the child nodes
+ * after soft poll unlocking is finished.
+ * 
+ *
+ */
+public class LockInstructionWatcher implements Watcher {
+       
+       private CuratorFramework curatorFramework;
+       private LiveLockAvoidance avoidanceCallback;
+       private KafkaLiveLockAvoider2 avoider;
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(LockInstructionWatcher.class.getName());
+       
+
+       public LockInstructionWatcher(CuratorFramework curatorFramework, LiveLockAvoidance avoidanceCallback,
+                       KafkaLiveLockAvoider2 avoider) {
+               super();
+               this.curatorFramework = curatorFramework;
+               this.avoidanceCallback = avoidanceCallback;
+               this.avoider = avoider;
+       }
+
+
+       @Override
+       public void process(WatchedEvent event) {
+               
+               switch (event.getType()) {
+               case NodeChildrenChanged:
+                       
+
+                       try {
+                               
+                               log.info("node children changed at path: {}", event.getPath());
+                               //String grpName = new String(curatorFramework.getData().forPath(event.getPath()));
+                               List<String> children = curatorFramework.getChildren().forPath(event.getPath());
+                               
+                               log.info("found children nodes prodcessing now");
+                               for (String child : children) {
+                                       String childPath = String.format("%s/%s", event.getPath(), child);
+                                       log.info("Processing child task at node {}",childPath);
+                                       avoidanceCallback.handleRebalanceUnlock( child);
+                                       log.info("Deleting child task at node {}",childPath);
+                                       curatorFramework.delete().forPath(childPath);
+                                       } 
+                               //reset the watch with the avoider
+                               avoider.assignNewProcessNode(avoidanceCallback.getAppId(), this);
+                       
+                               
+                       } catch (Exception e) {
+                               log.error("Error manipulating ZNode data in watcher",e);
+                       }
+                       
+                       break;
+
+               default:
+                       log.info("Listner fired on path: {}, with event: {}", event.getPath(), event.getType());
+                       break;
+               }
+       }
+       
+
+}
@@ -8,27 +8,29 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
 
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class MemoryConsumerFactory implements ConsumerFactory
@@ -52,7 +54,7 @@ public class MemoryConsumerFactory implements ConsumerFactory
         * @return Consumer
         */
        @Override
-       public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs )
+       public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs, String remotehost )
        {
                return new MemoryConsumer ( topic, consumerGroupId );
        }
@@ -97,8 +99,9 @@ public class MemoryConsumerFactory implements ConsumerFactory
                private long fLastAccessMs;
 
                @Override
-               public void close() {
+               public boolean close() {
                        //Nothing to close/clean up.
+                       return true;
                }
                /**
                 * 
@@ -135,6 +138,16 @@ public class MemoryConsumerFactory implements ConsumerFactory
                {
                        return fLastAccessMs;
                }
+
+               
+
+               @Override
+               public void setOffset(long offset) {
+                       // TODO Auto-generated method stub
+                       
+               }
+
+               
        }
 
        @Override
@@ -157,4 +170,13 @@ public class MemoryConsumerFactory implements ConsumerFactory
        {
                return new ArrayList<MemoryConsumer> ();
        }
+
+       @Override
+       public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+                       String remotehost) throws UnavailableException, CambriaApiException {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       
 }
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
 
 import java.util.HashMap;
 import java.util.LinkedList;
@@ -27,16 +27,15 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.metabroker.Topic;
+import com.att.dmf.mr.metabroker.Broker;
+import com.att.dmf.mr.metabroker.Topic;
 import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
 import com.att.nsa.security.NsaAcl;
 import com.att.nsa.security.NsaApiKey;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class MemoryMetaBroker implements Broker {
@@ -8,30 +8,30 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.Publisher.message;
 
 /**
  * When broker type is memory, then this class is doing all the topic related
  * operations
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class MemoryQueue {
@@ -120,7 +120,7 @@ public class MemoryQueue {
        /**
         * static inner class used to details about consumed messages
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        private static class msgInfo implements Consumer.Message {
@@ -152,7 +152,7 @@ public class MemoryQueue {
 
  /**
  * 
- * @author author
+ * @author sneha.d.desai
  *
  * private LogBuffer class has synchronized push and read method
  */
@@ -8,31 +8,33 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import org.apache.kafka.clients.producer.ProducerRecord;
+
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
+
 
-import kafka.producer.KeyedMessage;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class MemoryQueuePublisher implements Publisher {
@@ -46,14 +48,6 @@ public class MemoryQueuePublisher implements Publisher {
                fQueue = q;
        }
 
-       /**
-        * sendBatchMessages
-        * 
-        * @param topic
-        * @param kms
-        */
-       public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
-       }
        
        /**
         * 
@@ -79,6 +73,14 @@ public class MemoryQueuePublisher implements Publisher {
         * @param msgs
         * @throws IOException
         */
+
+       public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
+
+       }
+
+       public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+       }
+
        public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
                for (message m : msgs) {
                        sendMessage(topic, m);
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
 
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -28,14 +28,16 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import com.att.nsa.cambria.backends.Publisher;
+import org.apache.kafka.clients.producer.ProducerRecord;
 
-import kafka.producer.KeyedMessage;
+import com.att.dmf.mr.backends.Publisher;
+
+//import kafka.producer.KeyedMessage;
 
 /**
  * class used for logging perspective
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class MessageLogger implements Publisher {
@@ -73,13 +75,13 @@ public class MessageLogger implements Publisher {
         * @param topic
         * @param kms
         * @throws IOException
-        */
+       
        @Override
        public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
 
        IOException {
        }
-
+ */
        private FileOutputStream fStream;
 
        /**
@@ -98,4 +100,10 @@ public class MessageLogger implements Publisher {
                fStream.write(msg.getMessage().getBytes());
                fStream.write('\n');
        }
+       public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
+
+       }
+
+       public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+       }
 }
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import java.io.Serializable;
 
@@ -28,7 +28,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 @XmlRootElement
@@ -8,36 +8,38 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
+import java.util.Date;
 import java.util.HashMap;
 import java.util.concurrent.TimeUnit;
 
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.dmf.mr.utils.Utils;
+
 //import org.slf4j.Logger;
 //import org.slf4j.LoggerFactory;
 
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
 import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
 import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
@@ -47,7 +49,7 @@ import com.att.nsa.metrics.impl.CdmRateTicker;
 /**
  * class provide rate information
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 @Component
@@ -62,13 +64,19 @@ public class DMaaPCambriaLimiter {
        @Autowired
        public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings)
                        throws missingReqdSetting, invalidSettingValue {
-               fRateInfo = new HashMap<String, RateInfo>();
+                       fRateInfo = new HashMap<String, RateInfo>();
+               fRateInfoCheck = new HashMap<String, RateInfoCheck>();
                fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
                                CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
+               fMaxPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxPollsPerMinute,
+                               30);
                fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
                                CambriaConstants.kDefault_RateLimitWindowLength);
-               fSleepMs = settings.getLong(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+               fSleepMs = settings.getLong(CambriaConstants.kSetting_SleepMsOnRateLimit,
                                CambriaConstants.kDefault_SleepMsOnRateLimit);
+               fSleepMs1 = settings.getLong(CambriaConstants.kSetting_SleepMsRealOnRateLimit,
+                               5000);
+               
        }
 
        /**
@@ -90,8 +98,8 @@ public class DMaaPCambriaLimiter {
         *            Pass <= 0 to deactivate rate limiting.
         *            @param windowLengthMins
         */
-       public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins) {
-               this(maxEmptyPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute));
+       public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, double maxPollsPerMinute,int windowLengthMins) {
+               this(maxEmptyPollsPerMinute,maxPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute),getSleepMsForRate(1));
        }
 
        /**
@@ -102,11 +110,14 @@ public class DMaaPCambriaLimiter {
         * @param sleepMs
         * @param windowLengthMins
         */
-       public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins, long sleepMs) {
+       public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute,double maxPollsPerMinute, int windowLengthMins, long sleepMs ,long sleepMS1) {
                fRateInfo = new HashMap<String, RateInfo>();
+               fRateInfoCheck = new HashMap<String, RateInfoCheck>();
                fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
+               fMaxPollsPerMinute = Math.max(0, maxPollsPerMinute);
                fWindowLengthMins = windowLengthMins;
                fSleepMs = Math.max(0, sleepMs);
+               fSleepMs1 = Math.max(0, sleepMS1);
        }
 
        /**
@@ -118,40 +129,72 @@ public class DMaaPCambriaLimiter {
         * @param clientId
         * @throws CambriaApiException
         */
-       public void onCall(String topic, String consumerGroup, String clientId) throws CambriaApiException {
+       public void onCall(String topic, String consumerGroup, String clientId,String remoteHost) throws CambriaApiException {
                // do nothing if rate is configured 0 or less
                if (fMaxEmptyPollsPerMinute <= 0) {
                        return;
                }
-
-               // setup rate info for this tuple
+                               // setup rate info for this tuple
                final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
-
                final double rate = ri.onCall();
                log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
-
                if (rate > fMaxEmptyPollsPerMinute) {
                        try {
-                               log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+                               log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxPollsPerMinute
                                                + ".");
                                if (fSleepMs > 0) {
                                        log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
                                                        + " ms sleep, then responding in error.");
                                        Thread.sleep(fSleepMs);
+                                       
                                } else {
                                        log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
                                }
                        } catch (InterruptedException e) {
-                               log.error(e.toString());
-                               Thread.currentThread().interrupt();
+                               // ignore
                        }
+                       
+                       
                        ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests, 
                                        DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), 
                                        "This client is making too many requests. Please use a long poll "
-                                                       + "setting to decrease the number of requests that result in empty responses. ");
+                                                       + "setting to decrease the number of requests that result in empty responses. ","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost);
+                       
                        log.info(errRes.toString());
                        throw new CambriaApiException(errRes);
                }
+               /*if (fMaxPollsPerMinute <= 0) {
+                       return;
+               }
+               
+               final RateInfoCheck ric = getRateInfoCheck(topic, consumerGroup, clientId);
+               final double ratevalue = ric.onCall();
+               if (ratevalue > fMaxPollsPerMinute) {
+                       try {
+                               log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+                                               + ".");
+                               if (fSleepMs1 > fMaxPollsPerMinute) {
+                               log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+                                                       + " ms sleep, then responding in error.");
+                                       Thread.sleep(fSleepMs1);
+                                       ric.reset();
+                               } else {
+                                       log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
+                               }
+                       } catch (InterruptedException e) {
+                               // ignore
+                       }
+                       
+                       
+                       ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests, 
+                                       DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), 
+                                       "This client is making too many requests "
+                                                       + ",decrease the number of requests. ","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost);
+                       
+                       log.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }*/
+               
        }
 
        /**
@@ -181,7 +224,7 @@ public class DMaaPCambriaLimiter {
                        fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
                                        windowLengthMinutes, TimeUnit.MINUTES);
                }
-
+               
                public String getLabel() {
                        return fLabel;
                }
@@ -205,13 +248,57 @@ public class DMaaPCambriaLimiter {
                private final String fLabel;
                private final CdmRateTicker fCallRateSinceLastMsgSend;
        }
+       
+       
+       
+       private static class RateInfoCheck {
+               /**
+                * constructor initialzes
+                * 
+                * @param label
+                * @param windowLengthMinutes
+                */
+               public RateInfoCheck(String label, int windowLengthMinutes) {
+                       fLabel = label;
+                       fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
+                                       windowLengthMinutes, TimeUnit.MINUTES);
+               }
+
+               public String getLabel() {
+                       return fLabel;
+               }
+
+               /**
+                * CdmRateTicker is reset
+                */
+               public void reset() {
+                       fCallRateSinceLastMsgSend.reset();
+               }
+
+               /**
+                * 
+                * @return
+                */
+               public double onCall() {
+                       fCallRateSinceLastMsgSend.tick();
+                       return fCallRateSinceLastMsgSend.getRate();
+               }
 
+               private final String fLabel;
+               private final CdmRateTicker fCallRateSinceLastMsgSend;
+       }
+       
+       
        private final HashMap<String, RateInfo> fRateInfo;
+       private final HashMap<String, RateInfoCheck> fRateInfoCheck;
        private final double fMaxEmptyPollsPerMinute;
+       private final double fMaxPollsPerMinute;
        private final int fWindowLengthMins;
        private final long fSleepMs;
+       private final long fSleepMs1;
        //private static final Logger log = LoggerFactory.getLogger(DMaaPCambriaLimiter.class);
        private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
+       
        private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
                final String key = makeKey(topic, consumerGroup, clientId);
                RateInfo ri = fRateInfo.get(key);
@@ -221,7 +308,21 @@ public class DMaaPCambriaLimiter {
                }
                return ri;
        }
+       
+       
+       private RateInfoCheck getRateInfoCheck(String topic, String consumerGroup, String clientId) {
+               final String key = makeKey(topic, consumerGroup, clientId);
+               RateInfoCheck ri = fRateInfoCheck.get(key);
+               if (ri == null) {
+                       ri = new RateInfoCheck(key, 1);
+                       fRateInfoCheck.put(key, ri);
+               }
+               return ri;
+       }
 
+       
+       
+       
        private String makeKey(String topic, String group, String id) {
                return topic + "::" + group + "::" + id;
        }
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -28,12 +28,12 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.servlet.http.HttpSession;
 
-import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.ConfigurationReader;
 
 /**
  * DMaaPContext provide and maintain all the configurations , Http request/response
  * Session and consumer Request Time
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class DMaaPContext {
@@ -85,7 +85,7 @@ public class DMaaPContext {
     }
 
     public ConfigurationReader getConfigReader() {
-        return configReader;
+         return configReader;
     }
 
     public void setConfigReader(ConfigurationReader configReader) {
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
new file mode 100644 (file)
index 0000000..6fc0838
--- /dev/null
@@ -0,0 +1,365 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.beans;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.kafka.Kafka011Consumer;
+import com.att.dmf.mr.backends.kafka.Kafka011ConsumerUtil;
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache;
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
+import com.att.dmf.mr.backends.kafka.LiveLockAvoidance;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.utils.ConfigurationReader;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+
+/**
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
+
+       // private static final Logger log = LoggerFactory
+       // .getLogger(DMaaPKafkaConsumerFactory.class);
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
+       // @Autowired
+       // private KafkaLiveLockAvoider kafkaLiveLockAvoider = new
+       // KafkaLiveLockAvoider();
+
+       /**
+        * constructor initialization
+        * 
+        * @param settings
+        * @param metrics
+        * @param curator
+        * @throws missingReqdSetting
+        * @throws KafkaConsumerCacheException
+        * @throws UnknownHostException
+        */
+
+       public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
+                       @Qualifier("curator") CuratorFramework curator,
+                       @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider)
+                       throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException {
+
+               String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               CambriaConstants.kSetting_ApiNodeIdentifier);
+               if (apiNodeId == null) {
+
+                       apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
+               }
+
+               log.info("This Cambria API Node identifies itself as [" + apiNodeId + "].");
+               final String mode = CambriaConstants.DMAAP;
+
+               fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "kafka.metadata.broker.list");
+               if (null == fkafkaBrokers) {
+
+                       fkafkaBrokers = "localhost:9092";
+               }
+
+               boolean kSetting_EnableCache = kDefault_IsCacheEnabled;
+               String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "cambria.consumer.cache.enabled");
+               if (null != strkSetting_EnableCache)
+                       kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+
+               final boolean isCacheEnabled = kSetting_EnableCache;
+
+               // fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId,
+               // metrics) : null;
+               fCache = null;
+               if (isCacheEnabled) {
+                       fCache = KafkaConsumerCache.getInstance();
+
+               }
+               if (fCache != null) {
+                       fCache.setfMetrics(metrics);
+                       fCache.setfApiId(apiNodeId);
+                       fCache.startCache(mode, curator);
+                       if(kafkaLiveLockAvoider!=null){
+                       kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId));
+                       fkafkaLiveLockAvoider = kafkaLiveLockAvoider;
+                       }
+               }
+       }
+
+       /*
+        * getConsumerFor
+        * 
+        * @see
+        * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String,
+        * java.lang.String, java.lang.String, int, java.lang.String) This method is
+        * used by EventServiceImpl.getEvents() method to get a Kakfa consumer
+        * either from kafkaconsumer cache or create a new connection This also get
+        * the list of other consumer objects for the same consumer group and set to
+        * KafkaConsumer object. This list may be used during poll-rebalancing
+        * issue.
+        */
+       @Override
+       public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+                       String remotehost) throws UnavailableException, CambriaApiException {
+               Kafka011Consumer kc;
+
+               // To synchronize based on the consumer group.
+
+               Object syncObject = synchash.get(topic + consumerGroupName);
+               if (null == syncObject) {
+                       syncObject = new Object();
+                       synchash.put(topic + consumerGroupName, syncObject);
+               }
+
+               synchronized (syncObject) {
+                       try {
+                               kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null; // consumerId
+
+                       } catch (KafkaConsumerCacheException e) {
+                               log.info("######@@@@### Error occured in Kafka Caching" + e + "  " + topic + "::" + consumerGroupName
+                                               + "::" + consumerId);
+                               log.error("####@@@@## Error occured in Kafka Caching" + e + "  " + topic + "::" + consumerGroupName
+                                               + "::" + consumerId);
+                               throw new UnavailableException(e);
+                       }
+
+                       // Ideally if cache exists below flow should be skipped. If cache
+                       // didnt
+                       // exist, then create this first time on this node.
+                       if (kc == null) {
+
+                               log.info("^Kafka consumer cache value " + topic + "::" + consumerGroupName + "::" + consumerId + " =>"
+                                               + kc);
+
+                               final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(),
+                                               "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
+                               boolean locked = false;
+
+                               try {
+
+                                       locked = ipLock.acquire(30, TimeUnit.SECONDS);
+                                       if (!locked) {
+
+                                               log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
+                                                               + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost);
+                                               throw new UnavailableException(
+                                                               "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
+                                                                               + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost);
+                                       }
+
+                                       // ConfigurationReader.getCurator().checkExists().forPath("S").
+
+                                       log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId
+                                                       + "], on topic [" + topic + "].");
+
+                                       fCache.signalOwnership(topic, consumerGroupName, consumerId);
+
+                                       final Properties props = createConsumerConfig(topic,consumerGroupName, consumerId);
+                                       long fCreateTimeMs = System.currentTimeMillis();
+                                       KafkaConsumer<String, String> cc = new KafkaConsumer<>(props);
+                                       kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider);// ,fCache.getkafkaLiveLockAvoiderObj()
+                                                                                                                                                                                                                               // );
+                                       log.info(" kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+
+                                       if (fCache != null) {
+                                               fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc); //
+                                       }
+
+                               } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
+                                       log.info(
+                                                       "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId);
+                                       throw new UnavailableException("Couldn't connect to ZK.");
+                               } catch (KafkaConsumerCacheException e) {
+                                       log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage()
+                                                       + " " + consumerGroupName + "/" + consumerId);
+                               } catch (UnavailableException u) {
+                                       log.info("Failed and in UnavailableException block " + u.getMessage() + " " + consumerGroupName
+                                                       + "/" + consumerId);
+                                       throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u);
+                               } catch (Exception e) {
+                                       log.info("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
+                                                       + consumerId);
+                                       log.error("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
+                                                       + consumerId);
+                                       
+                               } finally {
+                                       if (locked) {
+                                               try {
+                                                       ipLock.release();
+                                               } catch (Exception e) {
+                                                       throw new UnavailableException("Error while releasing consumer factory lock" + e, e);
+                                               }
+                                       }
+                               }
+                       }
+               }
+               return kc;
+       }
+
+       @Override
+       public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) {
+               if (fCache != null) {
+                       fCache.dropConsumer(topic, consumerGroup, clientId);
+               }
+       }
+
+       @Override
+       public synchronized Collection<? extends Consumer> getConsumers() {
+               return fCache.getConsumers();
+       }
+
+       @Override
+       public synchronized void dropCache() {
+               fCache.dropAllConsumers();
+       }
+
+       
+       private KafkaConsumerCache fCache;
+       private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider;
+       private String fkafkaBrokers;
+
+
+
+       private static String makeLongKey(String key, String prefix) {
+               return prefix + "." + key;
+       }
+
+       private void transferSettingIfProvided(Properties target, String key, String prefix) {
+               String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix));
+
+               // if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
+               if (null != keyVal) {
+                       // final String val = fSettings
+                       // .getString(makeLongKey(key, prefix), "");
+                       log.info("Setting [" + key + "] to " + keyVal + ".");
+                       target.put(key, keyVal);
+               }
+       }
+
+       /**
+        * Name CreateConsumerconfig  
+        * @param topic
+        * @param groupId
+        * @param consumerId
+        * @return Properties
+        * 
+        * This method is to create Properties required to create kafka connection
+        * Group name is replaced with different format groupid--topic to address same 
+        * groupids for multiple topics. Same groupid with multiple topics 
+        * may start frequent consumer rebalancing on all the topics . Replacing them makes it unique
+        */
+       private Properties createConsumerConfig(String topic ,String groupId, String consumerId) {
+               final Properties props = new Properties();
+               //fakeGroupName is added to avoid multiple consumer group for multiple topics.Donot Change this logic
+               //Fix for CPFMF-644 : 
+               final String fakeGroupName = groupId + "--" + topic;
+               props.put("group.id", fakeGroupName);
+               props.put("enable.auto.commit", "false"); // 0.11
+               props.put("bootstrap.servers", fkafkaBrokers);
+               /*props.put("sasl.jaas.config",
+                               "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+               props.put("security.protocol", "SASL_PLAINTEXT");
+               props.put("sasl.mechanism", "PLAIN");*/
+               props.put("client.id", consumerId);
+
+               // additional settings: start with our defaults, then pull in configured
+               // overrides
+               populateKafkaInternalDefaultsMap();
+               for (String key : KafkaConsumerKeys) {
+                       transferSettingIfProvided(props, key, "kafka");
+               }
+
+               props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+               props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+
+               return props;
+       }
+
+
+       private static final String KafkaConsumerKeys[] = { "bootstrap.servers", "heartbeat.interval.ms",
+                       "auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes",
+                       "auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level",
+                       "fetch.max.bytes", "request.timeout.ms", "fetch.max.wait.bytes", "reconnect.backoff.max.ms",
+                       "max.partition.fetch.bytes", "reconnect.backoff.max.ms", "reconnect.backoff.ms", "retry.backoff.ms",
+                       "max.poll.interval.ms", "max.poll.records", "receive.buffer.bytes", "metadata.max.age.ms" };
+
+       /**
+        * putting values in hashmap like consumer timeout, zookeeper time out, etc
+        * 
+        * @param setting
+        */
+       private static void populateKafkaInternalDefaultsMap() { }
+
+       /*
+        * The starterIncremnt value is just to emulate calling certain consumers,
+        * in this test app all the consumers are local
+        * 
+        */
+       private LiveLockAvoidance makeAvoidanceCallback(final String appId) {
+
+               return new LiveLockAvoidance() {
+
+                       @Override
+                       public String getAppId() {
+                               return appId;
+                       }
+
+                       @Override
+                       public void handleRebalanceUnlock(String groupName) {
+                               log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName);
+                               Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::");
+                       }
+
+               };
+
+       }
+
+       @SuppressWarnings("rawtypes")
+       @Override
+       public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+                       String remotehost) throws UnavailableException, CambriaApiException {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       private HashMap<String, Object> synchash = new HashMap<String, Object>();
+
+}
\ No newline at end of file
@@ -8,39 +8,49 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Properties;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.concurrent.ExecutionException;
 
 import org.I0Itec.zkclient.ZkClient;
 import org.I0Itec.zkclient.exception.ZkNoNodeException;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.AdminClientConfig;
+import org.apache.kafka.clients.admin.CreateTopicsResult;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.common.KafkaFuture;
+import org.json.JSONObject;
+import org.json.JSONArray;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Broker;
+import com.att.dmf.mr.metabroker.Broker1;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.utils.ConfigurationReader;
 //import org.apache.log4-j.Logger;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.utils.ConfigurationReader;
+//import com.att.dmf.mr.backends.kafka.kafka011.SettingsUtil;
 import com.att.nsa.configs.ConfigDb;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.configs.ConfigPath;
@@ -50,20 +60,42 @@ import com.att.nsa.security.NsaAcl;
 import com.att.nsa.security.NsaAclUtils;
 import com.att.nsa.security.NsaApiKey;
 
-import kafka.admin.AdminUtils;
-import kafka.utils.ZKStringSerializer$;
 
 /**
  * class performing all topic operations
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
-
-public class DMaaPKafkaMetaBroker implements Broker {
+//@Component
+public class DMaaPKafkaMetaBroker implements Broker1 {
+
+       public DMaaPKafkaMetaBroker() {
+               fZk = null;
+               fCambriaConfig = null;
+               fBaseTopicData = null;
+               final Properties props = new Properties ();
+               String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "kafka.metadata.broker.list");
+               if (null == fkafkaBrokers) {
+
+                       fkafkaBrokers = "localhost:9092";
+               }
+               
+               
+               
+            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
+           /* props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+                props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");            
+            props.put("sasl.mechanism", "PLAIN");*/
+            fKafkaAdminClient=AdminClient.create ( props );
+           // fKafkaAdminClient = null;
+       }
 
        //private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class);
        private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+       private final AdminClient fKafkaAdminClient;
+       
        
 
        /**
@@ -79,6 +111,36 @@ public class DMaaPKafkaMetaBroker implements Broker {
                fZk = zk;
                fCambriaConfig = configDb;
                fBaseTopicData = configDb.parse("/topics");
+               final Properties props = new Properties ();
+               String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "kafka.metadata.broker.list");
+               if (null == fkafkaBrokers) {
+
+                       fkafkaBrokers = "localhost:9092";
+               }
+               
+               
+               
+            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
+            /* props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+                props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");            
+            props.put("sasl.mechanism", "PLAIN");*/
+            fKafkaAdminClient=AdminClient.create ( props );
+           // fKafkaAdminClient = null;
+               
+               
+       }
+       
+       public DMaaPKafkaMetaBroker( rrNvReadable settings,
+                       ZkClient zk,  ConfigDb configDb,AdminClient client) {
+               //fSettings = settings;
+               fZk = zk;
+               fCambriaConfig = configDb;
+               fBaseTopicData = configDb.parse("/topics");
+           fKafkaAdminClient= client;
+           // fKafkaAdminClient = null;
+               
+               
        }
 
        @Override
@@ -91,7 +153,6 @@ public class DMaaPKafkaMetaBroker implements Broker {
                        for (String topic : topics) {
                                result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
                        }
-
                        JSONObject dataObj = new JSONObject();
                        dataObj.put("topics", new JSONObject());
 
@@ -132,7 +193,7 @@ public class DMaaPKafkaMetaBroker implements Broker {
         */
        @Override
        public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
-                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException {
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException {
                log.info("Creating topic: " + topic);
                try {
                        log.info("Check if topic [" + topic + "] exist.");
@@ -162,78 +223,53 @@ public class DMaaPKafkaMetaBroker implements Broker {
                }
 
                // create via kafka
-               try {
-                       ZkClient zkClient = null;
-                       try {
-                               log.info("Loading zookeeper client for creating topic.");
-                               // FIXME: use of this scala module$ thing is a goofy hack to
-                               // make Kafka aware of the
-                               // topic creation. (Otherwise, the topic is only partially
-                               // created in ZK.)
-                               zkClient = ZkClientFactory.createZkClient();
-                               log.info("Zookeeper client loaded successfully. Creating topic.");
-                               AdminUtils.createTopic(zkClient, topic, partitions, replicas, new Properties());
-                       } catch (kafka.common.TopicExistsException e) {
-                               log.error("Topic [" + topic + "] could not be created. " + e.getMessage(), e);
-                               throw new TopicExistsException(topic);
-                       } catch (ZkNoNodeException e) {
-                               log.error("Topic [" + topic + "] could not be created. The Kafka cluster is not setup.", e);
-                               // Kafka throws this when the server isn't running (and perhaps
-                               // hasn't ever run)
-                               throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-                                               "The Kafka cluster is not setup.");
-                       } catch (kafka.admin.AdminOperationException e) {
-                               // Kafka throws this when the server isn't running (and perhaps
-                               // hasn't ever run)
-                               log.error("The Kafka cluster can't handle your request. Talk to the administrators: " + e.getMessage(),
-                                               e);
-                               throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-                                               "The Kafka cluster can't handle your request. Talk to the administrators.");
-                       } finally {
-                               log.info("Closing zookeeper connection.");
-                               if (zkClient != null)
-                                       zkClient.close();
+               
+                       try
+                       {
+                               final NewTopic topicRequest = new NewTopic ( topic, partitions, new Integer(replicas).shortValue () );
+                               final CreateTopicsResult ctr = fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );
+                               final KafkaFuture<Void> ctrResult = ctr.all ();
+                               ctrResult.get ();
+                               // underlying Kafka topic created. now setup our API info
+                               return createTopicEntry ( topic, desc, ownerApiKey, transactionEnabled );
                        }
-
-                       log.info("Creating topic entry for topic: " + topic);
-                       // underlying Kafka topic created. now setup our API info
-                       return createTopicEntry(topic, desc, ownerApiKey, transactionEnabled);
-               } catch (ConfigDbException excp) {
-                       log.error("Failed to create topic data. Talk to the administrators: " + excp.getMessage(), excp);
-                       throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-                                       "Failed to create topic data. Talk to the administrators.");
-               }
+                       catch ( InterruptedException e )
+                       {
+                               //timer.fail ( "Timeout" );
+                               log.warn ( "Execution of describeTopics timed out." );
+                               throw new ConfigDbException ( e );
+                       }
+                       catch ( ExecutionException e )
+                       {
+                               //timer.fail ( "ExecutionError" );
+                               log.warn ( "Execution of describeTopics failed: " + e.getCause ().getMessage (), e.getCause () );
+                               throw new ConfigDbException ( e.getCause () );
+                       }
+               
        }
 
        @Override
-       public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException {
+       public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException,ConfigDbException {
                log.info("Deleting topic: " + topic);
                ZkClient zkClient = null;
                try {
                        log.info("Loading zookeeper client for topic deletion.");
-                       // FIXME: use of this scala module$ thing is a goofy hack to make
-                       // Kafka aware of the
-                       // topic creation. (Otherwise, the topic is only partially created
+                                       // topic creation. (Otherwise, the topic is only partially created
                        // in ZK.)
-                       zkClient = ZkClientFactory.createZkClient();
-
+                       /*zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
+                                       ZKStringSerializer$.MODULE$);
+                       String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+                       if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
+                       ZkUtils zkutils =new ZkUtils(zkClient , new ZkConnection(strkSettings_KafkaZookeeper),false);
+                       */
+                       
+                       fKafkaAdminClient.deleteTopics(Arrays.asList(topic));
                        log.info("Zookeeper client loaded successfully. Deleting topic.");
-                       AdminUtils.deleteTopic(zkClient, topic);
-               } catch (kafka.common.TopicExistsException e) {
+                       //AdminUtils.deleteTopic(zkutils, topic);
+               } catch (Exception e) {
                        log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
-                       throw new TopicExistsException(topic);
-               } catch (ZkNoNodeException e) {
-                       log.error("Failed to delete topic [" + topic + "]. The Kafka cluster is not setup." + e.getMessage(), e);
-                       // Kafka throws this when the server isn't running (and perhaps
-                       // hasn't ever run)
-                       throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, "The Kafka cluster is not setup.");
-               } catch (kafka.admin.AdminOperationException e) {
-                       // Kafka throws this when the server isn't running (and perhaps
-                       // hasn't ever run)
-                       log.error("The Kafka cluster can't handle your request. Talk to the administrators." + e.getMessage(), e);
-                       throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-                                       "The Kafka cluster can't handle your request. Talk to the administrators.");
-               } finally {
+                       throw new ConfigDbException(e);
+               }  finally {
                        log.info("Closing zookeeper connection.");
                        if (zkClient != null)
                                zkClient.close();
@@ -242,8 +278,6 @@ public class DMaaPKafkaMetaBroker implements Broker {
                // throw new UnsupportedOperationException ( "We can't programmatically
                // delete Kafka topics yet." );
        }
-       
-       
 
        //private final rrNvReadable fSettings;
        private final ZkClient fZk;
@@ -295,7 +329,7 @@ public class DMaaPKafkaMetaBroker implements Broker {
         * class performing all user opearation like user is eligible to read,
         * write. permitting a user to write and read,
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        public static class KafkaTopic implements Topic {
@@ -328,25 +362,27 @@ public class DMaaPKafkaMetaBroker implements Broker {
                                                // created topics.
                                                JSONObject readers = o.optJSONObject ( "readers" );
                                                if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl;
-                                               fReaders = fromJson ( readers );
+                                               fReaders =  fromJson ( readers );
 
                                                JSONObject writers = o.optJSONObject ( "writers" );
                                                if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl;
                                                fWriters = fromJson ( writers );
                }
-                private NsaAcl fromJson(JSONObject o) {
-                               NsaAcl acl = new NsaAcl();
-                               if (o != null) {
-                                       JSONArray a = o.optJSONArray("allowed");
-                                       if (a != null) {
-                                               for (int i = 0; i < a.length(); ++i) {
-                                                       String user = a.getString(i);
-                                                       acl.add(user);
-                                               }
+               
+               private NsaAcl fromJson(JSONObject o) {
+                       NsaAcl acl = new NsaAcl();
+                       if (o != null) {
+                               JSONArray a = o.optJSONArray("allowed");
+                               if (a != null) {
+                                       for (int i = 0; i < a.length(); ++i) {
+                                               String user = a.getString(i);
+                                               acl.add(user);
                                        }
                                }
-                               return acl;
                        }
+                       return acl;
+               }
+
                @Override
                public String getName() {
                        return fName;
@@ -445,7 +481,7 @@ public class DMaaPKafkaMetaBroker implements Broker {
                private final NsaAcl fReaders;
                private final NsaAcl fWriters;
                private boolean fTransactionEnabled;
-
+       
                public boolean isTransactionEnabled() {
                        return fTransactionEnabled;
                }
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -28,9 +28,9 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
-import com.att.nsa.apiServer.metrics.cambria.DMaaPMetricsSender;
-import com.att.nsa.cambria.CambriaApiVersionInfo;
-import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.dmf.mr.CambriaApiVersionInfo;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.mr.apiServer.metrics.cambria.DMaaPMetricsSender;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
 import com.att.nsa.metrics.impl.CdmConstant;
 import com.att.nsa.metrics.impl.CdmCounter;
@@ -45,7 +45,7 @@ import com.att.nsa.metrics.impl.CdmTimeSince;
 /**
  * Metrics related information
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
@@ -87,7 +87,6 @@ public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSe
        //public DMaaPMetricsSet() {
                public DMaaPMetricsSet(rrNvReadable cs) {
                //fSettings = cs;
-
                fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
                super.putItem("version", fVersion);
 
@@ -8,26 +8,27 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import java.security.Key;
 
 //import org.apache.log4-j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.dmf.mr.constants.CambriaConstants;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.constants.CambriaConstants;
 import com.att.nsa.configs.ConfigDb;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.configs.confimpl.EncryptingLayer;
@@ -42,7 +43,7 @@ import com.att.nsa.util.rrConvertor;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class DMaaPNsaApiDb {
@@ -8,28 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import org.I0Itec.zkclient.ZkClient;
 import org.springframework.beans.factory.annotation.Qualifier;
 
-import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.ConfigurationReader;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
 
 /**
  * Created for Zookeeper client which will read configuration and settings parameter
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class DMaaPZkClient extends ZkClient {
@@ -8,29 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import org.springframework.beans.factory.annotation.Qualifier;
 
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.ConfigurationReader;
 import com.att.nsa.configs.confimpl.ZkConfigDb;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
 //import com.att.nsa.configs.confimpl.ZkConfigDb;
 /**
  * Provide the zookeeper config db connection 
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class DMaaPZkConfigDb extends ZkConfigDb {
@@ -1,3 +1,6 @@
+/**
+ * 
+ */
 /*******************************************************************************
  *  ============LICENSE_START=======================================================
  *  org.onap.dmaap
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-/**
- * 
- */
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import java.util.Date;
 
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.Utils;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.utils.Utils;
 
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 
@@ -1,3 +1,6 @@
+/**
+ * 
+ */
 /*******************************************************************************
  *  ============LICENSE_START=======================================================
  *  org.onap.dmaap
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-/**
- * 
- */
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
 
 import java.io.Serializable;
 
 import javax.xml.bind.annotation.XmlRootElement;
 
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 @XmlRootElement
@@ -39,8 +39,8 @@ public class TopicBean implements Serializable {
        private String topicName;
        private String topicDescription;
 
-       private int partitionCount = 1; //default values
-       private int replicationCount = 1; //default value
+       private int partitionCount;
+       private int replicationCount;
 
        private boolean transactionEnabled;
 
@@ -8,26 +8,24 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.constants;
-
-import org.apache.coyote.http11.Http11NioProtocol;
+package com.att.dmf.mr.constants;
 
-import com.att.nsa.cambria.utils.Utils;
+import com.att.dmf.mr.utils.Utils;
 
 /**
  * This is the constant files for all the property or parameters.
- * @author author
+ * @author nilanjana.maity
  *
  */
 public interface CambriaConstants {
@@ -55,9 +53,11 @@ String msgRtr_prop="MsgRtrApi.properties";
         * value to use to signal max empty poll per minute
         */
        String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
+       String kSetting_MaxPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
        double kDefault_MaxEmptyPollsPerMinute = 10.0;
 
        String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
+       String kSetting_SleepMsRealOnRateLimit = "cambria.rateLimitActual.delay.ms";
        long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
 
        String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
@@ -8,20 +8,20 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
 
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
 
 public class DMaaPAccessDeniedException extends CambriaApiException{
        
@@ -8,36 +8,36 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
 
 import javax.inject.Singleton;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.apache.http.HttpStatus;
 
+import org.apache.http.HttpStatus;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 
 /**
  * Exception Mapper class to handle
  * CambriaApiException 
- * @author author
+ * @author rajashree.khare
  *
  */
 @Provider
@@ -8,66 +8,91 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
+
+import javax.annotation.PostConstruct;
 
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Component;
+import org.springframework.web.context.support.SpringBeanAutowiringSupport;
 
 /**
  * This Class reads the error message properties
  * from the properties file
- * @author author
+ * @author rajashree.khare
  *
  */
 @Component
 public class DMaaPErrorMessages {
 
-       private String notFound="The requested resource was not found.Please verify the URL and try again.";
        
+                       
+
+       //@Value("${resource.not.found}")
+       private String notFound="The requested resource was not found.Please verify the URL and try again";
+       
+//     @Value("${server.unavailable}")
        private String serverUnav="Server is temporarily unavailable or busy.Try again later, or try another server in the cluster.";
        
+//     @Value("${http.method.not.allowed}")
        private String methodNotAllowed="The specified HTTP method is not allowed for the requested resource.Enter a valid HTTP method and try again.";
        
+       //@Value("${incorrect.request.json}")
        private String badRequest="Incorrect JSON object. Please correct the JSON format and try again.";
        
+//     @Value("${network.time.out}")
        private String nwTimeout="Connection to the DMaaP MR was timed out.Please try again.";
        
+       //@Value("${get.topic.failure}")
        private String topicsfailure="Failed to retrieve list of all topics.";
        
+       //@Value("${not.permitted.access.1}")
        private String notPermitted1="Access Denied.User does not have permission to perform";
        
-       private String notPermitted2="operation on Topic";
+       //@Value("${not.permitted.access.2}")
+       private String notPermitted2="operation on Topic:";
        
+       //@Value("${get.topic.details.failure}")
        private String topicDetailsFail="Failed to retrieve details of topic:";
        
+       //@Value("${create.topic.failure}")
        private String createTopicFail="Failed to create topic:";
        
+       //@Value("${delete.topic.failure}")
        private String deleteTopicFail="Failed to delete topic:";
        
+       //@Value("${incorrect.json}")
        private String incorrectJson="Incorrect JSON object.Could not parse JSON. Please correct the JSON format and try again.";
        
+       //@Value("${consume.msg.error}")
        private String consumeMsgError="Error while reading data from topic.";
        
+       //@Value("${publish.msg.error}")
        private String publishMsgError="Error while publishing data to topic.";
        
        
+       //@Value("${publish.msg.count}")
        private String publishMsgCount="Successfully published number of messages :";
        
        
+       //@Value("${authentication.failure}")
        private String authFailure="Access Denied: Invalid Credentials. Enter a valid MechId and Password and try again.";
+       //@Value("${msg_size_exceeds}")
        private String msgSizeExceeds="Message size exceeds the default size.";
        
        
+       //@Value("${topic.not.exist}")
        private String topicNotExist="No such topic exists.";
        
        public String getMsgSizeExceeds() {
@@ -215,6 +240,9 @@ public class DMaaPErrorMessages {
        }
        
        
-       
+       @PostConstruct
+       public void init() {
+           SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this);
+       }
        
 }
@@ -8,23 +8,23 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
 
 /**
  * Define the Error Response Codes for MR
  * using this enumeration
- * @author author
+ * @author rajashree.khare
  *
  */
 public enum DMaaPResponseCode {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
 
 import javax.inject.Singleton;
 import javax.ws.rs.BadRequestException;
@@ -34,16 +34,17 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
 
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.apache.http.HttpStatus;
 //import org.apache.log-4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
 /**
  * Exception Mapper class to handle
  * Jersey Exceptions
- * @author author
+ * @author rajashree.khare
  *
  */
 @Provider
@@ -8,24 +8,24 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
 import org.json.JSONObject;
 /**
  * Represents the Error Response Object 
  * that is rendered as a JSON object when
  * an exception or error occurs on MR Rest Service.
- * @author author
+ * @author rajashree.khare
  *
  */
 //@XmlRootElement
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.listener;
+package com.att.dmf.mr.listener;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
@@ -29,7 +29,7 @@ import com.att.eelf.configuration.EELFManager;
 
 /**
  * This is the Cambria Servlet Context Listner which helpes while loading the app which provide the endpoints 
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class CambriaServletContextListener implements ServletContextListener {
@@ -8,33 +8,33 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.listener;
+package com.att.dmf.mr.listener;
 
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Properties;
 
+import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
 import com.att.aft.dme2.api.DME2Exception;
 import com.att.aft.dme2.api.DME2Manager;
-import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
+import com.att.dmf.mr.service.impl.EventsServiceImpl;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.service.impl.EventsServiceImpl;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class DME2EndPointLoader {
@@ -8,35 +8,35 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metabroker;
+package com.att.dmf.mr.metabroker;
 
 import java.util.List;
 
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 
 /**
  * A broker interface to manage metadata around topics, etc.
  * 
- * @author author
+ * @author peter
  *
  */
 public interface Broker {
        /**
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        public class TopicExistsException extends Exception {
@@ -81,12 +81,12 @@ public interface Broker {
         * @throws CambriaApiException
         */
        Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
-                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException;
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
 
        /**
         * Delete a topic by name
         * 
         * @param topic
         */
-       void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException;
+       void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
 }
diff --git a/src/main/java/com/att/dmf/mr/metabroker/Broker1.java b/src/main/java/com/att/dmf/mr/metabroker/Broker1.java
new file mode 100644 (file)
index 0000000..e7d7f6c
--- /dev/null
@@ -0,0 +1,95 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.metabroker;
+
+import java.util.List;
+
+import com.att.dmf.mr.CambriaApiException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * A broker interface to manage metadata around topics, etc.
+ * alternate for Broker1 to avoid this error  in spring boot
+ *org.springframework.beans.factory.NoUniqueBeanDefinitionException:
+ * No qualifying bean of type [com.att.dmf.mr.metabroker.Broker] is defined: 
+ * expected single matching bean but found 2: mmb,dMaaPKafkaMetaBroker
+
+ *
+ */
+public interface Broker1 {
+       /**
+        * 
+        * @author Ramkumar
+        *
+        */
+       public class TopicExistsException extends Exception {
+               /**
+                * 
+                * @param topicName
+                */
+               public TopicExistsException(String topicName) {
+                       super("Topic " + topicName + " exists.");
+               }
+
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * Get all topics in the underlying broker.
+        * 
+        * @return
+        * @throws ConfigDbException
+        */
+       List<Topic> getAllTopics() throws ConfigDbException;
+
+       /**
+        * Get a specific topic from the underlying broker.
+        * 
+        * @param topic
+        * @return a topic, or null
+        */
+       Topic getTopic(String topic) throws ConfigDbException;
+
+       /**
+        * create a  topic
+        * 
+        * @param topic
+        * @param description
+        * @param ownerApiKey
+        * @param partitions
+        * @param replicas
+        * @param transactionEnabled
+        * @return
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        */
+       Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
+
+       /**
+        * Delete a topic by name
+        * 
+        * @param topic
+        */
+       void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
+}
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metabroker;
+package com.att.dmf.mr.metabroker;
 
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.security.NsaAcl;
@@ -28,14 +28,14 @@ import com.att.nsa.security.ReadWriteSecuredResource;
 /**
  * This is the interface for topic and all the topic related operations
  * get topic name, owner, description, transactionEnabled etc.
- * @author author
+ * @author nilanjana.maity
  *
  */
 public interface Topic extends ReadWriteSecuredResource
 {      
        /**
         * User defined exception for access denied while access the topic for Publisher and consumer
-        * @author author
+        * @author nilanjana.maity
         *
         *//*
        public class AccessDeniedException extends Exception
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
 
 import java.io.IOException;
 import java.util.List;
@@ -29,7 +29,7 @@ import java.util.concurrent.TimeUnit;
  * A Cambria batching publisher is a publisher with additional functionality
  * for managing delayed sends.
  * 
- * @author author
+ * @author peter
  *
  */
 public interface CambriaBatchingPublisher extends CambriaPublisher
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
 
 //import org.slf4j.Logger;
 
@@ -29,7 +29,7 @@ import com.att.eelf.configuration.EELFLogger;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public interface CambriaClient {
@@ -8,24 +8,24 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
 
 import java.io.IOException;
 
 /**
  * This interface will provide fetch mechanism for consumer
- * @author author
+ * @author nilanjana.maity
  *
  */
 public interface CambriaConsumer extends CambriaClient
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -27,7 +27,7 @@ import java.util.Collection;
 /**
  * A Cambria publishing interface.
  * 
- * @author author
+ * @author peter
  *
  */
 public interface CambriaPublisher extends CambriaClient {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
 
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
@@ -30,7 +30,7 @@ import java.util.List;
 import org.apache.http.HttpHost;
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class CambriaPublisherUtility
@@ -53,7 +53,7 @@ public class CambriaPublisherUtility
                }
                catch ( UnsupportedEncodingException e )
                {
-                       throw new AssertionError ( e );
+                       throw new RuntimeException ( e );
                }
        }
 /**
@@ -8,26 +8,26 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
 
 import java.net.MalformedURLException;
 import java.util.Collection;
 import java.util.TreeSet;
 import java.util.UUID;
 
-import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
-import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
+import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
+import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
 
 /**
  * A factory for Cambria clients.<br/>
@@ -42,7 +42,7 @@ import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublis
  * <br/>
  * Publishers
  * 
- * @author author
+ * @author peter
  */
 public class DMaaPCambriaClientFactory {
        /**
@@ -52,7 +52,8 @@ public class DMaaPCambriaClientFactory {
         * 
         * @param hostList
         *            A comma separated list of hosts to use to connect to Cambria.
-        *            You can include port numbers (3904 is the default). 
+        *            You can include port numbers (3904 is the default). For
+        *            example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"
         * 
         * @param topic
         *            The topic to consume
@@ -168,7 +169,8 @@ public class DMaaPCambriaClientFactory {
         * 
         * @param hostList
         *            A comma separated list of hosts to use to connect to Cambria.
-        *            You can include port numbers (3904 is the default). 
+        *            You can include port numbers (3904 is the default). For
+        *            example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"
         * @param topic
         *            The topic to consume
         * @param consumerGroup
@@ -239,11 +241,11 @@ public class DMaaPCambriaClientFactory {
                if (sfMock != null)
                        return sfMock;
                try {
-                       return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
-                                       consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
-               } catch (MalformedURLException e) {
-                       throw new RuntimeException(e);
-               }
+               return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
+                               consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
+       } catch (MalformedURLException e) {
+               throw new RuntimeException(e);
+       }
        }
 
        /*************************************************************************/
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
 
 import java.net.MalformedURLException;
 import java.util.Collection;
@@ -29,30 +29,32 @@ import java.util.concurrent.TimeUnit;
 
 import org.json.JSONArray;
 import org.json.JSONException;
+
+import com.att.dmf.mr.constants.CambriaConstants;
 //import org.slf4j.Logger;
 //import org.slf4j.LoggerFactory;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
 import com.att.nsa.apiClient.http.CacheUse;
 import com.att.nsa.apiClient.http.HttpClient;
-import com.att.nsa.cambria.constants.CambriaConstants;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
-public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria.metrics.publisher.CambriaClient 
+public class CambriaBaseClient extends HttpClient implements com.att.dmf.mr.metrics.publisher.CambriaClient 
 {
        protected CambriaBaseClient ( Collection<String> hosts ) throws MalformedURLException
        {
                this ( hosts, null );
        }
 
-       protected CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
+       public CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
        {
-//             super ( hosts, CambriaConstants.kStdCambriaServicePort, clientSignature,
-//                     CacheUse.NONE, 1, 1, TimeUnit.MILLISECONDS );
+               /*super ( hosts, CambriaConstants.kStdCambriaServicePort, clientSignature,
+                       CacheUse.NONE, 1, 1, TimeUnit.MILLISECONDS );*/
+               
                super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
 
                //fLog = LoggerFactory.getLogger ( this.getClass().getName () );
@@ -65,7 +67,7 @@ public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria
        {
        }
 
-       protected Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
+       public Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
        {
                if ( a == null ) return null;
 
@@ -86,7 +88,7 @@ public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria
                //replaceLogger ( log );
        }
 
-       protected EELFLogger  getLog ()
+       public EELFLogger  getLog ()
        {
                return fLog;
        }
@@ -8,23 +8,23 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
 
 /**
  * 
  * This class maintains the system clocks
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class Clock
@@ -53,7 +53,7 @@ public class Clock
         * Get current time in milliseconds
         * @return current time in ms
         */
-       protected long nowImpl ()
+       public long nowImpl ()
        {
                return System.currentTimeMillis ();
        }
@@ -61,13 +61,13 @@ public class Clock
        /**
         * Initialize constructor
         */
-       protected Clock ()
+       public Clock ()
        {
        }
 
        private static Clock sfClock = null;
 
-       protected synchronized static void register ( Clock testClock )
+       public synchronized static void register ( Clock testClock )
        {
                sfClock = testClock;
        }
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
 
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
@@ -29,23 +29,23 @@ import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
 
-import jline.internal.Log;
-
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
 
+import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;
 import com.att.nsa.apiClient.http.HttpException;
 import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+
+import jline.internal.Log;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
-               implements com.att.nsa.cambria.metrics.publisher.CambriaConsumer {
+               implements com.att.dmf.mr.metrics.publisher.CambriaConsumer {
        private final String fTopic;
        private final String fGroup;
        private final String fId;
@@ -64,7 +64,6 @@ public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
         * @param filter
         * @param apiKey
         * @param apiSecret
-        * @throws MalformedURLException 
         */
        public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
                        final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
@@ -138,7 +137,7 @@ public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
                return msgs;
        }
 
-       protected String createUrlPath(int timeoutMs, int limit) {
+       public String createUrlPath(int timeoutMs, int limit) {
                final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
                final StringBuilder adds = new StringBuilder();
                if (timeoutMs > -1) {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -40,24 +40,24 @@ import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.Response;
 
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;
 
 /**
  * 
  * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages
  * in batch
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
-               implements com.att.nsa.cambria.metrics.publisher.CambriaBatchingPublisher {
+               implements com.att.dmf.mr.metrics.publisher.CambriaBatchingPublisher {
        /**
         * 
         * static inner class initializes with urls, topic,batchSize
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        public static class Builder {
@@ -118,12 +118,13 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
                 * 
                 * @return
                 */
-               public DMaaPCambriaSimplerBatchPublisher build() {
+               public DMaaPCambriaSimplerBatchPublisher build()  {
+                       
                        try {
-                               return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
-                       } catch (MalformedURLException e) {
-                               throw new RuntimeException(e);
-                       }
+                       return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
+               } catch (MalformedURLException e) {
+                       throw new RuntimeException(e);
+               }
                }
 
                private Collection<String> fUrls;
@@ -190,8 +191,7 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
                                                + "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
                        }
                } catch (InterruptedException e) {
-                       getLog().info(" Interruption Exception is caught here : " + e.getMessage());
-                       Thread.currentThread().interrupt();
+                       getLog().warn("Possible message loss. " + e.getMessage(), e);
                } catch (IOException e) {
                        getLog().warn("Possible message loss. " + e.getMessage(), e);
                }
@@ -374,7 +374,6 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
         * @param maxBatchSize
         * @param maxBatchAgeMs
         * @param compress
-        * @throws MalformedURLException 
         */
        private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
                        long maxBatchAgeMs, boolean compress) throws MalformedURLException {
@@ -406,7 +405,7 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
        /**
         * 
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        private static class TimestampedMessage extends message {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.resources;
+package com.att.dmf.mr.resources;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -27,19 +27,19 @@ import java.util.zip.GZIPInputStream;
 
 import javax.servlet.http.HttpServletResponse;
 
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.resources.streamReaders.CambriaJsonStreamReader;
+import com.att.dmf.mr.resources.streamReaders.CambriaRawStreamReader;
+import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader;
+import com.att.dmf.mr.resources.streamReaders.CambriaTextStreamReader;
 import com.att.nsa.apiServer.streams.ChunkedInputStream;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.resources.streamReaders.CambriaJsonStreamReader;
-import com.att.nsa.cambria.resources.streamReaders.CambriaRawStreamReader;
-import com.att.nsa.cambria.resources.streamReaders.CambriaStreamReader;
-import com.att.nsa.cambria.resources.streamReaders.CambriaTextStreamReader;
 import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
 
 /**
  * An inbound event set.
  * 
- * @author author
+ * @author peter
  */
 public class CambriaEventSet {
        private final reader fReader;
@@ -99,7 +99,7 @@ public class CambriaEventSet {
 
        /**
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        public interface reader {
@@ -8,49 +8,57 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.resources;
+package com.att.dmf.mr.resources;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.ArrayList;
 import java.util.Date;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.json.JSONException;
 import org.json.JSONObject;
 import org.json.JSONTokener;
 
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.Consumer.Message;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder.StreamWriter;
-import com.att.nsa.cambria.utils.Utils;
-
-import jline.internal.Log;
-
-
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter;
+import com.att.dmf.mr.utils.Utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+//import com.att.nsa.drumlin.till.nv.rrNvReadable;
+/*import com.att.sa.highlandPark.config.HpConfigContext;
+import com.att.sa.highlandPark.config.HpReaderException;
+import com.att.sa.highlandPark.events.HpJsonEvent;
+import com.att.sa.highlandPark.events.HpJsonEventFactory;
+import com.att.sa.highlandPark.processor.HpAlarmFilter;
+import com.att.sa.highlandPark.processor.HpEvent;
+import com.att.sa.highlandPark.processor.HpProcessingEngine;
+import com.att.sa.highlandPark.processor.HpProcessingEngine.EventFactory;
+*/
 /**
  * class used to write the consumed messages
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class CambriaOutboundEventStream implements StreamWriter {
@@ -61,16 +69,16 @@ public class CambriaOutboundEventStream implements StreamWriter {
         * static innerclass it takes all the input parameter for kafka consumer
         * like limit, timeout, meta, pretty
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        public static class Builder {
 
                // Required
                private final Consumer fConsumer;
-               //private final rrNvReadable fSettings;   // used during write to tweak
-                                                                                               // format, decide to explicitly
-                                                                                               // close stream or not
+               // private final rrNvReadable fSettings; // used during write to tweak
+               // format, decide to explicitly
+               // close stream or not
 
                // Optional
                private int fLimit;
@@ -78,6 +86,7 @@ public class CambriaOutboundEventStream implements StreamWriter {
                private String fTopicFilter;
                private boolean fPretty;
                private boolean fWithMeta;
+               ArrayList<Consumer> fKafkaConsumerList;
 
                // private int fOffset;
                /**
@@ -88,13 +97,14 @@ public class CambriaOutboundEventStream implements StreamWriter {
                 */
                public Builder(Consumer c) {
                        this.fConsumer = c;
-                       //this.fSettings = settings;
+                       // this.fSettings = settings;
 
                        fLimit = CambriaConstants.kNoTimeout;
                        fTimeoutMs = CambriaConstants.kNoLimit;
                        fTopicFilter = CambriaConstants.kNoFilter;
                        fPretty = false;
                        fWithMeta = false;
+                       //this.fKafkaConsumerList = consList;
                        // fOffset = CambriaEvents.kNextOffset;
                }
 
@@ -187,31 +197,31 @@ public class CambriaOutboundEventStream implements StreamWriter {
                fConsumer = builder.fConsumer;
                fLimit = builder.fLimit;
                fTimeoutMs = builder.fTimeoutMs;
-               //fSettings = builder.fSettings;
+               // fSettings = builder.fSettings;
                fSent = 0;
                fPretty = builder.fPretty;
                fWithMeta = builder.fWithMeta;
-               
-//             if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
-//                     fHpAlarmFilter = null;
-//                     fHppe = null;
-//             } else {
-//                     try {
-//                             final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
-//                             HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
-//                             fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
-//                             final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
-//                             fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
-//                     } catch (HpReaderException e) {
-//                             // JSON was okay, but the filter engine says it's bogus
-//                             throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-//                                             "Couldn't create filter: " + e.getMessage());
-//                     } catch (JSONException e) {
-//                             // user sent a bogus JSON object
-//                             throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-//                                             "Couldn't parse JSON: " + e.getMessage());
-//                     }
-//             }
+               fKafkaConsumerList = builder.fKafkaConsumerList;
+       /*      if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
+                       fHpAlarmFilter = null;
+                       fHppe = null;
+               } else {
+                       try {
+                               final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
+                               HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
+                               fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
+                               final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
+                               fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
+                       } catch (HpReaderException e) {
+                               // JSON was okay, but the filter engine says it's bogus
+                               throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+                                               "Couldn't create filter: " + e.getMessage());
+                       } catch (JSONException e) {
+                               // user sent a bogus JSON object
+                               throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+                                               "Couldn't parse JSON: " + e.getMessage());
+                       }
+               }*/
        }
 
        /**
@@ -222,16 +232,20 @@ public class CambriaOutboundEventStream implements StreamWriter {
        public interface operation {
                /**
                 * Call thread.sleep
+                * 
                 * @throws IOException
                 */
                void onWait() throws IOException;
-/**
- * provides the output based in the consumer paramter
- * @param count
- * @param msg
- * @throws IOException
- */
-               void onMessage(int count, Message msg) throws IOException;
+
+               /**
+                * provides the output based in the consumer paramter
+                * 
+                * @param count
+                * @param msg
+                * @throws IOException
+                */
+               // void onMessage(int count, Message msg) throws IOException;
+               void onMessage(int count, String msg, String transId, long offSet) throws IOException, JSONException;
        }
 
        /**
@@ -246,74 +260,63 @@ public class CambriaOutboundEventStream implements StreamWriter {
        /**
         * 
         * @param os
-        * throws IOException
+        *            throws IOException
         */
        public void write(final OutputStream os) throws IOException {
-               //final boolean transactionEnabled = topic.isTransactionEnabled();
-               //final boolean transactionEnabled = isTransEnabled();
-               final boolean transactionEnabled = istransEnable;
+               // final boolean transactionEnabled = topic.isTransactionEnabled();
+               // final boolean transactionEnabled = isTransEnabled();
+               // final boolean transactionEnabled = istransEnable;
+               // synchronized(this){
                os.write('[');
-
                fSent = forEachMessage(new operation() {
                        @Override
-                       public void onMessage(int count, Message msg) throws IOException, JSONException {
-
-                               String message = "";
-                               JSONObject jsonMessage = null;
-                               if (transactionEnabled) {
-                                       jsonMessage = new JSONObject(msg.getMessage());
-                                       message = jsonMessage.getString("message");
-                               }
+                       public void onMessage(int count, String msg, String transId, long offSet)
+                                       throws IOException, JSONException {
 
                                if (count > 0) {
                                        os.write(',');
                                }
-
                                if (fWithMeta) {
                                        final JSONObject entry = new JSONObject();
-                                       entry.put("offset", msg.getOffset());
-                                       entry.put("message", message);
+                                       entry.put("offset", offSet);
+                                       entry.put("message", msg);
                                        os.write(entry.toString().getBytes());
                                } else {
-                                       //os.write(message.getBytes());
-                                        String jsonString = "";
-                                       if(transactionEnabled){
-                                               jsonString= JSONObject.valueToString(message);
-                                       }else{
-                                               jsonString = JSONObject.valueToString (msg.getMessage());
-                                               }
-                                       os.write ( jsonString.getBytes () );
+                                       // os.write(message.getBytes());
+                                               String jsonString = JSONObject.valueToString(msg);
+                                       os.write(jsonString.getBytes());
                                }
 
                                if (fPretty) {
                                        os.write('\n');
                                }
 
-                               
-                               String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-                               if (null==metricTopicname)
-                         metricTopicname="msgrtr.apinode.metrics.dmaap";
-                
-                if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
-                               if (transactionEnabled) {
-                                       final String transactionId = jsonMessage.getString("transactionId");
-                                       responseTransactionId = transactionId;
-
-                                       StringBuilder consumerInfo = new StringBuilder();
-                                       if (null != dmaapContext && null != dmaapContext.getRequest()) {
-                                               final HttpServletRequest request = dmaapContext.getRequest();
-                                               consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
-                                               consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
-                                               consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
-                                               consumerInfo.append(
-                                                               "consumerGroup= \"" + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
-                                               consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
+                               String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap
+                                               .getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
+                               if (null == metricTopicname)
+                                       metricTopicname = "msgrtr.apinode.metrics.dmaap";
+                               if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
+                                       try {
+                                               if (istransEnable && istransType) {
+                                                       // final String transactionId =
+                                                       // jsonMessage.getString("transactionId");
+                                                       // responseTransactionId = transId;
+                                                       StringBuilder consumerInfo = new StringBuilder();
+                                                       if (null != dmaapContext && null != dmaapContext.getRequest()) {
+                                                               final HttpServletRequest request = dmaapContext.getRequest();
+                                                               consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
+                                                               consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
+                                                               consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
+                                                               consumerInfo.append("consumerGroup= \""
+                                                                               + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
+                                                               consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
+                                                       }
+                                                       log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transId
+                                                                       + "\",messageLength= \"" + msg.length() + "\",topic= \"" + topic.getName() + "\"]");
+                                               }
+                                       } catch (Exception e) {
                                        }
-
-                                       log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transactionId
-                                                       + "\",messageLength= \"" + message.length() + "\",topic= \"" + topic.getName() + "\"]");
                                }
-                }
 
                        }
 
@@ -321,6 +324,7 @@ public class CambriaOutboundEventStream implements StreamWriter {
                        /**
                         * 
                         * It makes thread to wait
+                        * 
                         * @throws IOException
                         */
                        public void onWait() throws IOException {
@@ -329,15 +333,14 @@ public class CambriaOutboundEventStream implements StreamWriter {
                                        // FIXME: would be good to wait/signal
                                        Thread.sleep(100);
                                } catch (InterruptedException e) {
-                                       Log.error(e.toString());
-                               Thread.currentThread().interrupt();     
+                                       // ignore
                                }
                        }
                });
 
-               //if (null != dmaapContext && isTransactionEnabled()) {
-                       if (null != dmaapContext && istransEnable) {
-                       
+               // if (null != dmaapContext && isTransactionEnabled()) {
+               if (null != dmaapContext && istransEnable && istransType) {
+
                        dmaapContext.getResponse().setHeader("transactionId",
                                        Utils.getResponseTransactionId(responseTransactionId));
                }
@@ -346,12 +349,14 @@ public class CambriaOutboundEventStream implements StreamWriter {
                os.flush();
 
                boolean close_out_stream = true;
-               String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"close.output.stream");
-               if(null!=strclose_out_stream)close_out_stream=Boolean.parseBoolean(strclose_out_stream);
-               
-               //if (fSettings.getBoolean("close.output.stream", true)) {
-                               if (close_out_stream) {
+               String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "close.output.stream");
+               if (null != strclose_out_stream)
+                       close_out_stream = Boolean.parseBoolean(strclose_out_stream);
+
+               // if (fSettings.getBoolean("close.output.stream", true)) {
+               if (close_out_stream) {
                        os.close();
+                       // }
                }
        }
 
@@ -371,21 +376,22 @@ public class CambriaOutboundEventStream implements StreamWriter {
                }
                return null;
        }
-/**
- * 
- * @param op
- * @return
- * @throws IOException
- * @throws JSONException 
- */
+
+       /**
+        * 
+        * @param op
+        * @return
+        * @throws IOException
+        * @throws JSONException
+        */
        public int forEachMessage(operation op) throws IOException, JSONException {
                final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);
 
                int count = 0;
                boolean firstPing = true;
-
+               // boolean isTransType=false;
                final long startMs = System.currentTimeMillis();
-               final long timeoutMs = fTimeoutMs + startMs;
+               final long timeoutMs = fTimeoutMs + startMs -500; //500 ms used in poll 
 
                while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
                        if (!firstPing) {
@@ -393,23 +399,63 @@ public class CambriaOutboundEventStream implements StreamWriter {
                        }
                        firstPing = false;
 
-                       Consumer.Message msg = null;
-                       while (count < effectiveLimit && (msg = fConsumer.nextMessage()) != null) {
+               
+                                Consumer.Message msgRecord = null;
+                                while (count < effectiveLimit && (msgRecord =
+                                fConsumer.nextMessage()) != null) {
 
-                               
                                String message = "";
-                       //      if (topic.isTransactionEnabled() || true) {
-                               if (istransEnable) {
-                                       // As part of DMaaP changes we are wrapping the original
-                                       // message into a json object
-                                       // and then this json object is further wrapped into message
-                                       // object before publishing,
-                                       // so extracting the original message from the message
-                                       // object for matching with filter.
-                                       final JSONObject jsonMessage = new JSONObject(msg.getMessage());
-                                       message = jsonMessage.getString("message");
-                               } else {
-                                       message = msg.getMessage();
+                               String transactionid = "";
+                               try {
+                   // String msgRecord = msg;
+                                       JSONObject jsonMessage = new JSONObject(msgRecord);
+                                       String[] keys = JSONObject.getNames(jsonMessage);
+                                       boolean wrapheader1 = false;
+                                       boolean wrapheader2 = false;
+                                       boolean found_attr3 = false;
+                                       String wrapElement1 = "message";
+                                       String wrapElement2 = "msgWrapMR";
+                                       String transIdElement = "transactionId";
+                                       if (null != keys) {
+                                               for (String key : keys) {
+                                                       if (key.equals(wrapElement1)) {
+                                                               wrapheader1 = true;
+                                                       } else if (key.equals(wrapElement2)) {
+                                                               wrapheader2 = true;
+                                                       } else if (key.equals(transIdElement)) {
+                                                               found_attr3 = true;
+                                                               transactionid = jsonMessage.getString(key);
+                                                       }
+                                               }
+                                       }
+
+                                       // returns contents of attribute 1 if both attributes
+                                       // present, otherwise
+                                       // the whole msg
+                                       if (wrapheader2 && found_attr3) {
+                                               message = jsonMessage.getString(wrapElement2);
+                                       } else if (wrapheader1 && found_attr3) {
+                                               message = jsonMessage.getString(wrapElement1);
+                                       } else {
+                                               message = msgRecord.getMessage();
+                                       }
+                                       // jsonMessage = extractMessage(jsonMessage ,
+                                       // "message","msgWrapMR","transactionId");
+                                       istransType = true;
+                               } catch (JSONException e) { // This check is required for the
+                                                                                       // message sent by MR AAF flow but
+                                                                                       // consumed by UEB ACL flow which
+                                                                                       // wont expect transaction id in
+                                                                                       // cambria client api
+                                       // Ignore
+                                       log.info("JSON Exception logged when the message is non JSON Format");
+                               } catch (Exception exp) {
+                                       log.info("****Some Exception occured for writing messages in topic" + topic.getName()
+                                                       + "  Exception" + exp);
+                               }
+                               if (message == null || message.equals("")) {
+                                       istransType = false;
+                                       message = msgRecord.getMessage();
                                }
 
                                // If filters are enabled/set, message should be in JSON format
@@ -417,23 +463,26 @@ public class CambriaOutboundEventStream implements StreamWriter {
                                // otherwise filter will automatically ignore message in
                                // non-json format.
                                if (filterMatches(message)) {
-                                       op.onMessage(count, msg);
+                                       op.onMessage(count, message, transactionid, msgRecord.getOffset());
                                        count++;
+
                                }
+
                        }
                }
-
                return count;
        }
 
+       
+
        /**
         * 
         * Checks whether filter is initialized
         */
-//     private boolean isFilterInitialized() {
-//             return (fHpAlarmFilter != null && fHppe != null);
-//     }
-
+       /*private boolean isFilterInitialized() {
+               return (fHpAlarmFilter != null && fHppe != null);
+       }
+*/
        /**
         * 
         * @param msg
@@ -441,18 +490,18 @@ public class CambriaOutboundEventStream implements StreamWriter {
         */
        private boolean filterMatches(String msg) {
                boolean result = true;
-//             if (isFilterInitialized()) {
-//                     try {
-//                             final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
-//                             result = fHpAlarmFilter.matches(fHppe, e);
-//                     } catch (JSONException x) {
-//                             // the msg may not be JSON
-//                             result = false;
-//                             log.error("Failed due to " + x.getMessage());
-//                     } catch (Exception x) {
-//                             log.error("Error using filter: " + x.getMessage(), x);
-//                     }
-//             }
+               /*if (isFilterInitialized()) {
+                       try {
+                               final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
+                               result = fHpAlarmFilter.matches(fHppe, e);
+                       } catch (JSONException x) {
+                               // the msg may not be JSON
+                               result = false;
+                               log.error("Failed due to " + x.getMessage());
+                       } catch (Exception x) {
+                               log.error("Error using filter: " + x.getMessage(), x);
+                       }
+               }*/
 
                return result;
        }
@@ -472,48 +521,34 @@ public class CambriaOutboundEventStream implements StreamWriter {
        public void setTopic(Topic topic) {
                this.topic = topic;
        }
-       
+
        public void setTopicStyle(boolean aaftopic) {
                this.isAAFTopic = aaftopic;
        }
-       
-       public void setTransEnabled ( boolean transEnable) {
+
+       public void setTransEnabled(boolean transEnable) {
                this.istransEnable = transEnable;
        }
 
-       /*private boolean isTransactionEnabled() {
-               //return topic.isTransactionEnabled();
-               return true; // let metrics creates for all the topics
-       }*/
-
-       private boolean isTransEnabled() {
-               String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
-               boolean istransidreqd=false;
-               if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) || isAAFTopic){
-                       istransidreqd = true; 
-               }
-               
-               return istransidreqd;
-
-       }
        
        private final Consumer fConsumer;
        private final int fLimit;
        private final int fTimeoutMs;
-       //private final rrNvReadable fSettings;
+       // private final rrNvReadable fSettings;
        private final boolean fPretty;
        private final boolean fWithMeta;
        private int fSent;
 //     private final HpAlarmFilter<HpJsonEvent> fHpAlarmFilter;
-//     private final HpProcessingEngine<HpJsonEvent> fHppe;
+       //private final HpProcessingEngine<HpJsonEvent> fHppe;
        private DMaaPContext dmaapContext;
        private String responseTransactionId;
        private Topic topic;
        private boolean isAAFTopic = false;
        private boolean istransEnable = false;
-       
+       private ArrayList<Consumer> fKafkaConsumerList;
+       private boolean istransType = true;
+       // private static final Logger log =
+       // Logger.getLogger(CambriaOutboundEventStream.class);
 
-       //private static final Logger log = Logger.getLogger(CambriaOutboundEventStream.class);
-       
        private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
 }
\ No newline at end of file
@@ -8,21 +8,20 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
 
 import java.io.InputStream;
-import java.util.logging.Logger;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -30,22 +29,19 @@ import org.json.JSONException;
 import org.json.JSONObject;
 import org.json.JSONTokener;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
-
-import jline.internal.Log;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public class CambriaJsonStreamReader implements reader {
-       private static final Logger LOG = Logger.getLogger(CambriaJsonStreamReader.class.toString());
        private final JSONTokener fTokens;
-       private final boolean fIsList;          
+       private final boolean fIsList;
        private long fCount;
        private final String fDefPart;
        public static final String kKeyField = "cambria.partition";
@@ -72,7 +68,6 @@ public class CambriaJsonStreamReader implements reader {
                                throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
                        }
                } catch (JSONException e) {
-                       Log.error(e);
                        throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
                }
        }
@@ -114,7 +109,6 @@ public class CambriaJsonStreamReader implements reader {
                        fCount++;
                        return new msg(o);
                } catch (JSONException e) {
-                       Log.error(e.toString());
                        throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
 
                }
@@ -8,34 +8,34 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
 
 import java.io.IOException;
 import java.io.InputStream;
 
 import javax.servlet.http.HttpServletResponse;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
 import com.att.nsa.util.StreamTools;
 
 /**
  * 
  * This stream reader reads raw bytes creating a single message.
- * @author author
+ * @author peter
  *
  */
 public class CambriaRawStreamReader implements reader
@@ -130,7 +130,7 @@ public class CambriaRawStreamReader implements reader
                }
                catch ( IOException e )
                {
-                       throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.toString());
+                       throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
                }
        }
        
@@ -8,28 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
 
 import java.io.IOException;
 import java.io.InputStream;
 
 import javax.servlet.http.HttpServletResponse;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
 
 /**
  * Read an optionally chunked stream in the Cambria app format. This format
@@ -41,7 +41,7 @@ import com.att.nsa.cambria.resources.CambriaEventSet.reader;
  * Whitespace before/after each entry is ignored, so messages can be delivered
  * with newlines between them, or not.
  * 
- * @author author
+ * @author peter
  *
  */
 public class CambriaStreamReader implements reader {
@@ -8,44 +8,40 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.util.logging.Logger;
 
 import javax.servlet.http.HttpServletResponse;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
-
-import jline.internal.Log;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
 
 /**
  * This stream reader just pulls single lines. It uses the default partition if provided. If
  * not, the key is the current time, which does not guarantee ordering.
  * 
- * @author author
+ * @author peter
  *
  */
 public class CambriaTextStreamReader implements reader
 {
-       private Logger log = Logger.getLogger(CambriaTextStreamReader.class.toString());
        /**
         * This is the constructor for Cambria Text Reader format
         * @param is
@@ -135,7 +131,6 @@ public class CambriaTextStreamReader implements reader
                }
                catch ( IOException e )
                {
-                       Log.error(e);
                        throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
                }
        }
@@ -8,29 +8,29 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.security;
+package com.att.dmf.mr.security;
 
 import javax.servlet.http.HttpServletRequest;
 
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
 
 
 
 
 /**
  * 
- * @author author
+ * @author sneha.d.desai
  *
  */
 public interface DMaaPAAFAuthenticator {
@@ -8,30 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.security;
-
+package com.att.dmf.mr.security;
 
 import javax.servlet.http.HttpServletRequest;
 
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.constants.CambriaConstants;
 
 
 /**
  * 
- * @author author
+ * @author sneha.d.desai
  *
  */
 public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
@@ -48,6 +46,7 @@ public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
                        
                        auth = true;
                }
+               //System.out.println("role " +role +"    user: "+ req.getRemoteUser() +"   : auth="+auth);
                return auth;
        }
 
@@ -55,9 +54,28 @@ public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
        public String aafPermissionString(String topicName, String action) throws CambriaApiException {
                
                
-               String mrNameS = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
-                               "msgRtr.namespace.aaf");
-               String permission = mrNameS+"|:topic."+topicName+"|"+action;
+               String permission = "";
+               String nameSpace ="";
+               if(topicName.contains(".") && topicName.contains("com.att")) {
+                       //String topic = topicName.substring(topicName.lastIndexOf(".")+1);
+                       nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
+               }
+               else {
+                       nameSpace = null;
+                        nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB");
+                       
+                       if(null==nameSpace)nameSpace="com.att.dmaap.mr.ueb";
+                       
+                       
+                       /*ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                       DMaaPResponseCode.TOPIC_NOT_IN_AAF.getResponseCode(), "Topic does not exist in AAF"
+                                                       , null, Utils.getFormattedDate(new Date()), topicName,
+                                       null, null, null, null);
+                                       
+                       throw new CambriaApiException(errRes);*/
+               }
+               
+               permission = nameSpace+".mr.topic|:topic."+topicName+"|"+action;
                return permission;
                
        }
@@ -8,28 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.security;
+package com.att.dmf.mr.security;
 
 import javax.servlet.http.HttpServletRequest;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
 import com.att.nsa.security.NsaApiKey;
 
 
 /**
  * An interface for authenticating an inbound request.
- * @author author
+ * @author nilanjana.maity
  *
  * @param <K> NsaApiKey
  */
@@ -8,34 +8,32 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.security;
+package com.att.dmf.mr.security;
 
 import java.util.LinkedList;
 
 import javax.servlet.http.HttpServletRequest;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
 import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.NsaAuthenticator;
-import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
 import com.att.nsa.security.db.NsaApiDb;
 import com.att.nsa.security.db.simple.NsaSimpleApiKey;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  * @param <K>
  */
@@ -48,7 +46,7 @@ public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthent
        // Setting timeout to a large value for testing purpose.
        // private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10; //
        // 10 minutes
-       private static final long kDefaultRequestTimeWindow = (long)1000 * 60 * 10 * 10 * 10 * 10 * 10;
+       private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10 * 10 * 10 * 10 * 10;
 
        /**
         * Construct the security manager against an API key database
@@ -131,6 +129,5 @@ public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthent
        {
                this.fAuthenticators.add(a);
        }
-       
 
 }
@@ -8,35 +8,33 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.security.impl;
+package com.att.dmf.mr.security.impl;
 
 import javax.servlet.http.HttpServletRequest;
 
-
-
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.DMaaPAuthenticator;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.authenticators.MechIdAuthenticator;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.DMaaPAuthenticator;
 //import com.att.nsa.security.db.NsaApiDb;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.authenticators.MechIdAuthenticator;
 
 /**
  * An authenticator for AT&T MechIds.
  * 
- * @author author
+ * @author peter
  *
  * @param <K>
  */
@@ -8,27 +8,29 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.security.impl;
+package com.att.dmf.mr.security.impl;
 
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+
 import javax.servlet.http.HttpServletRequest;
+
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.DMaaPAuthenticator;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.DMaaPAuthenticator;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.drumlin.till.data.sha1HmacSigner;
 import com.att.nsa.security.NsaApiKey;
@@ -38,7 +40,7 @@ import com.att.nsa.security.db.NsaApiDb;
  * This authenticator handles an AWS-like authentication, originally used by the
  * Cambria server (the API server for UEB).
  * 
- * @author author
+ * @author peter
  *
  * @param <K>
  */
@@ -8,29 +8,29 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 
 import org.json.JSONException;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 public interface AdminService {
@@ -8,25 +8,25 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 
-import com.att.nsa.cambria.beans.ApiKeyBean;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.dmf.mr.beans.ApiKeyBean;
+import com.att.dmf.mr.beans.DMaaPContext;
 import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
 
 /**
@@ -8,33 +8,33 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 import java.io.InputStream;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public interface EventsService {
@@ -8,28 +8,26 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.json.JSONException;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
@@ -37,7 +35,7 @@ import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 /**
  * Contains the logic for executing calls to the Mirror Maker agent tool.
  * 
- * @author <a href="mailto:"></a>
+ * @author <a href="mailto:kawsar.jahan@att.com">Kawsar Jahan</a>
  *
  * @since May 25, 2016
  */
@@ -8,31 +8,31 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 /**
- * @author 
+ * @author amol.ramesh.dalne
  *
  */
 import java.io.IOException;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.beans.DMaaPContext;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public interface MetricsService {
@@ -8,34 +8,34 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 
 import org.json.JSONException;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.TopicBean;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.TopicBean;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
 import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 
 /**
  * interface provide all the topic related operations
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public interface TopicService {
@@ -8,29 +8,29 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 
 import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.transaction.TransactionObj;
 import com.att.nsa.configs.ConfigDbException;
 
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  */
 public interface TransactionService {
@@ -1,3 +1,6 @@
+/**
+ * 
+ */
 /*******************************************************************************
  *  ============LICENSE_START=======================================================
  *  org.onap.dmaap
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-/**
- * 
- */
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
 
 import java.io.IOException;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.configs.ConfigDbException;
-
-import kafka.common.TopicExistsException;
+import org.apache.kafka.common.errors.TopicExistsException;
 
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.nsa.configs.ConfigDbException;
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 public interface UIService {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -30,21 +30,22 @@ import org.json.JSONException;
 import org.json.JSONObject;
 import org.springframework.stereotype.Component;
 
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.AdminService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.AdminService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.limits.Blacklist;
 import com.att.nsa.security.NsaApiKey;
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+//import com.att.sa.highlandPark.util.HpJsonUtil;
 
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 @Component
@@ -125,7 +126,26 @@ public class AdminServiceImpl implements AdminService {
                adminAuthenticate ( dMaaPContext );
 
                DMaaPResponseBuilder.respondOk ( dMaaPContext,
-                       new JSONObject().put ( "blacklist", setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
+                       new JSONObject().put ( "blacklist",
+                                       setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
+       }
+       
+       public static JSONArray setToJsonArray ( Set<?> fields )
+       {
+               return collectionToJsonArray ( fields );
+       }
+       
+       public static JSONArray collectionToJsonArray ( Collection<?> fields )
+       {
+               final JSONArray a = new JSONArray ();
+               if ( fields != null )
+               {
+                       for ( Object o : fields )
+                       {
+                               a.put ( o );
+                       }
+               }
+               return a;
        }
        
        /**
@@ -161,28 +181,10 @@ public class AdminServiceImpl implements AdminService {
        {
                
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
-               if ( (user == null || !user.getKey ().equals ( "admin" )) )
+               if ( user == null || !user.getKey ().equals ( "admin" ) )
                {
                        throw new AccessDeniedException ();
                }
        }
-       
-       public static JSONArray setToJsonArray ( Set<?> fields )
-       {
-               return collectionToJsonArray ( fields );
-       }
-
-       public static JSONArray collectionToJsonArray ( Collection<?> fields )
-       {
-               final JSONArray a = new JSONArray ();
-               if ( fields != null )
-               {
-                       for ( Object o : fields )
-                       {
-                               a.put ( o );
-                       }
-               }
-               return a;
-       }
 
 }
@@ -8,36 +8,35 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.IOException;
 
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.springframework.stereotype.Service;
 
-//import com.att.nsa.apiServer.util.Emailer;
-import com.att.nsa.cambria.utils.Emailer;
-import com.att.nsa.cambria.beans.ApiKeyBean;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.ApiKeysService;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.beans.ApiKeyBean;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.ApiKeysService;
+import com.att.dmf.mr.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.utils.Emailer;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
 import com.att.nsa.security.NsaApiKey;
@@ -50,7 +49,7 @@ import com.att.nsa.security.db.simple.NsaSimpleApiKey;
  * Implementation of the ApiKeysService, this will provide the below operations,
  * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
  * 
- * @author author
+ * @author nilanjana.maity
  */
 @Service
 public class ApiKeysServiceImpl implements ApiKeysService {
@@ -135,7 +134,6 @@ public class ApiKeysServiceImpl implements ApiKeysService {
 
                log.debug("TopicService: : createApiKey....");
                
-               
                        String contactEmail = nsaApiKey.getEmail();
                        final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
                         String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
@@ -148,15 +146,12 @@ public class ApiKeysServiceImpl implements ApiKeysService {
                return;
              }
                
-
-         
-         
+               
                final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
                String apiKey = nsaApiKey.getKey();
                String sharedSecret = nsaApiKey.getSharedSecret();
                final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
                                sharedSecret);
-
                if (null != key) {
 
                        if (null != nsaApiKey.getEmail()) {
@@ -170,6 +165,7 @@ public class ApiKeysServiceImpl implements ApiKeysService {
                        log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
                                        + key.toString() + "=====");
                        apiKeyDb.saveApiKey(key);
+                       // System.out.println("here4");
                        // email out the secret to validate the email address
                        if ( emailProvided )
                        {
@@ -200,7 +196,7 @@ public class ApiKeysServiceImpl implements ApiKeysService {
                                );
                        DMaaPResponseBuilder.respondOk(dmaapContext,
                                        o);
-               /*o.put("secret", "Emailed to " + contactEmail + ".");
+                        /*o.put("secret", "Emailed to " + contactEmail + ".");
                        DMaaPResponseBuilder.respondOk(dmaapContext,
                                        o); */
                        return;
@@ -8,26 +8,26 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.util.Set;
 import java.util.TreeSet;
 
-import com.att.nsa.cambria.transaction.DMaaPTransactionFactory;
-import com.att.nsa.cambria.transaction.DMaaPTransactionObj;
-import com.att.nsa.cambria.transaction.DMaaPTransactionObjDB;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
+import com.att.dmf.mr.transaction.DMaaPTransactionObj;
+import com.att.dmf.mr.transaction.DMaaPTransactionObjDB;
+import com.att.dmf.mr.transaction.TransactionObj;
 import com.att.nsa.configs.ConfigDb;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.configs.ConfigPath;
@@ -35,7 +35,7 @@ import com.att.nsa.configs.ConfigPath;
 /**
  * Persistent storage for Transaction objects built over an abstract config db.
  * 
- * @author author
+ * @author anowarul.islam
  *
  * @param <K>
  */
diff --git a/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java
new file mode 100644 (file)
index 0000000..4ca6446
--- /dev/null
@@ -0,0 +1,867 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.dmf.mr.service.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Properties;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.errors.TopicExistsException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
+import com.att.dmf.mr.beans.DMaaPCambriaLimiter;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.resources.CambriaEventSet;
+import com.att.dmf.mr.resources.CambriaOutboundEventStream;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticator;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.EventsService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.utils.Utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+
+/**
+ * This class provides the functinality to publish and subscribe message to
+ * kafka
+ * 
+ * @author Ramkumar Sembaiyam
+ *
+ */
+@Service
+public class EventsServiceImpl implements EventsService {
+       // private static final Logger LOG =
+       // Logger.getLogger(EventsServiceImpl.class);
+       private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+
+       private static final String BATCH_LENGTH = "event.batch.length";
+       private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+       @Autowired
+       private DMaaPErrorMessages errorMessages;
+       
+       //@Autowired
+       //KafkaLiveLockAvoider2 kafkaLiveLockAvoider;
+
+       // @Value("${metrics.send.cambria.topic}")
+       // private String metricsTopic;
+
+       public DMaaPErrorMessages getErrorMessages() {
+               return errorMessages;
+       }
+
+       public void setErrorMessages(DMaaPErrorMessages errorMessages) {
+               this.errorMessages = errorMessages;
+       }
+
+       /**
+        * @param ctx
+        * @param topic
+        * @param consumerGroup
+        * @param clientId
+        * @throws ConfigDbException,
+        *             TopicExistsException, AccessDeniedException,
+        *             UnavailableException, CambriaApiException, IOException
+        * 
+        * 
+        */
+       @Override
+       public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+                       throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+                       CambriaApiException, IOException, DMaaPAccessDeniedException {
+               final long startTime = System.currentTimeMillis();
+               final HttpServletRequest req = ctx.getRequest();
+       //System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"+kafkaLiveLockAvoider);
+               boolean isAAFTopic = false;
+               // was this host blacklisted?
+               final String remoteAddr = Utils.getRemoteAddress(ctx);
+               if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                       "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+
+               int limit = CambriaConstants.kNoLimit;
+               if (req.getParameter("limit") != null) {
+                       limit = Integer.parseInt(req.getParameter("limit"));
+               }
+
+               int timeoutMs = CambriaConstants.kNoTimeout;
+               String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
+               if (strtimeoutMS != null)
+                       timeoutMs = Integer.parseInt(strtimeoutMS);
+               // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
+               // CambriaConstants.kNoTimeout);
+               if (req.getParameter("timeout") != null) {
+                       timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+               }
+
+               // By default no filter is applied if filter is not passed as a
+               // parameter in the request URI
+               String topicFilter = CambriaConstants.kNoFilter;
+               if (null != req.getParameter("filter")) {
+                       topicFilter = req.getParameter("filter");
+               }
+               // pretty to print the messaages in new line
+               String prettyval = "0";
+               String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
+               if (null != strPretty)
+                       prettyval = strPretty;
+
+               String metaval = "0";
+               String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
+               if (null != strmeta)
+                       metaval = strmeta;
+
+               final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
+               // withMeta to print offset along with message
+               final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
+
+               final LogWrap logger = new LogWrap(topic, consumerGroup, clientId);
+               logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost());
+
+               // is this user allowed to read this topic?
+               final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+               final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+               if (metatopic == null) {
+                       // no such topic.
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+                                       DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+                                       errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
+                                       topic, null, null, consumerGroup + "/" + clientId, ctx.getRequest().getRemoteHost());
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+               String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "metrics.send.cambria.topic");
+               if (null == metricTopicname)
+                       metricTopicname = "msgrtr.apinode.metrics.dmaap";
+
+               if (null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) {
+                       if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))) {
+                               // check permissions
+                               metatopic.checkUserRead(user);
+                       }
+               }
+               // if headers are not provided then user will be null
+               if (user == null && null != ctx.getRequest().getHeader("Authorization")) {
+                       // the topic name will be sent by the client
+                       // String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"sub";
+                       DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+                       String permission = aaf.aafPermissionString(topic, "sub");
+                       if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
+                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                               errorMessages.getNotPermitted1() + " read " + errorMessages.getNotPermitted2() + topic + " on "
+                                                               + permission,
+                                               null, Utils.getFormattedDate(new Date()), topic, null, null, consumerGroup + "/" + clientId,
+                                               ctx.getRequest().getRemoteHost());
+                               LOG.info(errRes.toString());
+                               throw new DMaaPAccessDeniedException(errRes);
+
+                       }
+                       isAAFTopic = true;
+               }
+               final long elapsedMs1 = System.currentTimeMillis() - startTime;
+               logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup
+                               + " " + clientId);
+               Consumer c = null;
+               // String localclientId = clientId;
+               String lhostId = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "clusterhostid");
+               if (null == lhostId) {
+                       try {
+                               lhostId = InetAddress.getLocalHost().getCanonicalHostName();
+                       } catch (UnknownHostException e) {
+                               LOG.info("Unknown Host Exception error occured while getting getting hostid");
+                       }
+
+               }
+                CambriaOutboundEventStream coes = null;
+               try {
+                       final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+                       final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
+                       rl.onCall(topic, consumerGroup, clientId, ctx.getRequest().getRemoteHost());
+                       c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,
+                                       ctx.getRequest().getRemoteHost());
+                       coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
+                                       .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
+                       coes.setDmaapContext(ctx);
+                       coes.setTopic(metatopic);
+                       if (isTransEnabled() || isAAFTopic) {
+                               coes.setTransEnabled(true);
+                       } else {
+                               coes.setTransEnabled(false);
+                       }
+                       coes.setTopicStyle(isAAFTopic);
+                       final long elapsedMs2 = System.currentTimeMillis() - startTime;
+                       logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " "
+                                       + consumerGroup + " " + clientId);
+
+                       DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+               
+                       DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
+                       // No IOException thrown during respondOkWithStream, so commit the
+                       // new offsets to all the brokers
+                       c.commitOffsets();
+                       final int sent = coes.getSentCount();
+
+                        metricsSet.consumeTick(sent);
+                    rl.onSend(topic, consumerGroup, clientId, sent);
+                       final long elapsedMs = System.currentTimeMillis() - startTime;
+                       logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset() + " for "
+                                       + topic + " " + consumerGroup + " " + clientId + " on to the server "
+                                       + ctx.getRequest().getRemoteHost());
+
+               } catch (UnavailableException excp) {
+                       logger.warn(excp.getMessage(), excp);
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+                                       DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+                                       errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+                                       null, null, consumerGroup + "-" + clientId, ctx.getRequest().getRemoteHost());
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               }  catch (java.util.ConcurrentModificationException excp1) {
+                       LOG.info(excp1.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId + " from Remote"+ctx.getRequest().getRemoteHost());
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
+                                       DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
+                                       "Couldn't respond to client, possible of consumer requests from more than one server. Please contact MR team if you see this issue occurs continously", null,
+                                       Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+                       logger.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+                       
+               } catch (CambriaApiException excp) {
+                       LOG.info(excp.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId);
+                       
+                       throw excp;
+               }
+               catch (Exception excp) {
+                       // System.out.println(excp + "------------------ " + topic+"
+                       // "+consumerGroup+" "+clientId);
+
+                       logger.info("Couldn't respond to client, closing cambria consumer " + " " + topic + " " + consumerGroup
+                                       + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp);
+                                       
+                               ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+
+                       
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+                                       DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+                                       "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
+                                       Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+                       logger.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               } finally {
+                       coes = null;
+                       // If no cache, close the consumer now that we're done with it.
+                       boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
+                       String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       ConsumerFactory.kSetting_EnableCache);
+                       if (null != strkSetting_EnableCache)
+                               kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+                       // if
+                       // (!ctx.getConfigReader().getSettings().getBoolean(ConsumerFactory.kSetting_EnableCache,
+                       // ConsumerFactory.kDefault_IsCacheEnabled) && (c != null)) {
+                       if (!kSetting_EnableCache && (c != null)) {
+                               try {
+                                       c.close();
+                               } catch (Exception e) {
+                                       logger.info("***Exception occured in getEvents finaly block while closing the consumer " + " "
+                                                       + topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE
+                                                       + " " + e);
+                               }
+                       }
+               }
+       }
+
+       /**
+        * @throws missingReqdSetting
+        * 
+        */
+       @Override
+       public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+                       final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+                       CambriaApiException, IOException, missingReqdSetting, DMaaPAccessDeniedException {
+
+               // is this user allowed to write to this topic?
+               final long startMs = System.currentTimeMillis();
+               final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+               final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+               boolean isAAFTopic = false;
+
+               // was this host blacklisted?
+               final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+               if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                       "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+
+               String topicNameStd = null;
+
+               // topicNameStd=
+               // ctx.getConfigReader().getSettings().getString("enforced.topic.name.AAF");
+               topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
+                               "enforced.topic.name.AAF");
+               String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "metrics.send.cambria.topic");
+               if (null == metricTopicname)
+                       metricTopicname = "msgrtr.apinode.metrics.dmaap";
+               boolean topicNameEnforced = false;
+               if (null != topicNameStd && topic.startsWith(topicNameStd)) {
+                       topicNameEnforced = true;
+               }
+
+               // Here check if the user has rights to publish on the topic
+               // ( This will be called when no auth is added or when UEB API Key
+               // Authentication is used)
+               // checkUserWrite(user) method will throw an error when there is no Auth
+               // header added or when the
+               // user has no publish rights
+
+               if (null != metatopic && null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))
+                               && null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) {
+                       metatopic.checkUserWrite(user);
+               }
+
+               // if headers are not provided then user will be null
+               if (topicNameEnforced || (user == null && null != ctx.getRequest().getHeader("Authorization")
+                               && !topic.equalsIgnoreCase(metricTopicname))) {
+                       // the topic name will be sent by the client
+                       // String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
+                       DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+                       String permission = aaf.aafPermissionString(topic, "pub");
+                       if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
+                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                               errorMessages.getNotPermitted1() + " publish " + errorMessages.getNotPermitted2() + topic
+                                                               + " on " + permission,
+                                               null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                               ctx.getRequest().getRemoteHost(), null, null);
+                               LOG.info(errRes.toString());
+                               throw new DMaaPAccessDeniedException(errRes);
+                       }
+                       isAAFTopic = true;
+               }
+
+               final HttpServletRequest req = ctx.getRequest();
+
+               // check for chunked input
+               boolean chunked = false;
+               if (null != req.getHeader(TRANSFER_ENCODING)) {
+                       chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+               }
+               // get the media type, or set it to a generic value if it wasn't
+               // provided
+               String mediaType = req.getContentType();
+               if (mediaType == null || mediaType.length() == 0) {
+                       mediaType = MimeTypes.kAppGenericBinary;
+               }
+
+               if (mediaType.contains("charset=UTF-8")) {
+                       mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+               }
+
+               String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "transidUEBtopicreqd");
+               boolean istransidreqd = false;
+               if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) {
+                       istransidreqd = true;
+               }
+
+               if (isAAFTopic || istransidreqd) {
+                       pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+               } else {
+                       pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+               }
+               final long endMs = System.currentTimeMillis();
+               final long totalMs = endMs - startMs;
+
+               LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic);
+
+       }
+
+       /**
+        * 
+        * @param ctx
+        * @param topic
+        * @param msg
+        * @param defaultPartition
+        * @param chunked
+        * @param mediaType
+        * @throws ConfigDbException
+        * @throws AccessDeniedException
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        * @throws IOException
+        */
+       private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
+                       String mediaType)
+                       throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException {
+               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+               // setup the event set
+               final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+               // start processing, building a batch to push to the backend
+               final long startMs = System.currentTimeMillis();
+               long count = 0;
+               long maxEventBatch = 1024 * 16;
+               String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+               if (null != batchlen)
+                       maxEventBatch = Long.parseLong(batchlen);
+               // long maxEventBatch =
+               // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+               final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+               // final ArrayList<KeyedMessage<String, String>> kms = new
+               // ArrayList<KeyedMessage<String, String>>();
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+               try {
+                       // for each message...
+                       Publisher.message m = null;
+                       while ((m = events.next()) != null) {
+                               // add the message to the batch
+                               batch.add(m);
+                               // final KeyedMessage<String, String> data = new
+                               // KeyedMessage<String, String>(topic, m.getKey(),
+                               // m.getMessage());
+                               // kms.add(data);
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+
+                               pms.add(data);
+                               // check if the batch is full
+                               final int sizeNow = batch.size();
+                               if (sizeNow > maxEventBatch) {
+                                       // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+                                       // kms);
+                                       // kms.clear();
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       pms.clear();
+                                       batch.clear();
+                                       metricsSet.publishTick(sizeNow);
+                                       count += sizeNow;
+                               }
+                       }
+
+                       // send the pending batch
+                       final int sizeNow = batch.size();
+                       if (sizeNow > 0) {
+                               // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+                               // kms);
+                               // kms.clear();
+                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                               pms.clear();
+                               batch.clear();
+                               metricsSet.publishTick(sizeNow);
+                               count += sizeNow;
+                       }
+
+                       final long endMs = System.currentTimeMillis();
+                       final long totalMs = endMs - startMs;
+
+                       LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server "
+                                       + ctx.getRequest().getRemoteHost());
+
+                       // build a responseP
+                       final JSONObject response = new JSONObject();
+                       response.put("count", count);
+                       response.put("serverTimeMs", totalMs);
+                       DMaaPResponseBuilder.respondOk(ctx, response);
+
+               } catch (Exception excp) {
+                       int status = HttpStatus.SC_NOT_FOUND;
+                       String errorMsg = null;
+                       if (excp instanceof CambriaApiException) {
+                               status = ((CambriaApiException) excp).getStatus();
+                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                               JSONObject errObject = new JSONObject(jsonTokener);
+                               errorMsg = (String) errObject.get("message");
+
+                       }
+                       ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                       errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+                                                       + "." + errorMsg,
+                                       null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+                                       null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               }
+       }
+
+       /**
+        * 
+        * @param ctx
+        * @param inputStream
+        * @param topic
+        * @param partitionKey
+        * @param requestTime
+        * @param chunked
+        * @param mediaType
+        * @throws ConfigDbException
+        * @throws AccessDeniedException
+        * @throws TopicExistsException
+        * @throws IOException
+        * @throws CambriaApiException
+        */
+       private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+                       final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+                       throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, CambriaApiException {
+
+               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+               // setup the event set
+               final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+               // start processing, building a batch to push to the backend
+               final long startMs = System.currentTimeMillis();
+               long count = 0;
+               long maxEventBatch = 1024 * 16;
+               String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+               if (null != evenlen)
+                       maxEventBatch = Long.parseLong(evenlen);
+               // final long maxEventBatch =
+               // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+               final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+               // final ArrayList<KeyedMessage<String, String>> kms = new
+               // ArrayList<KeyedMessage<String, String>>();
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+               Publisher.message m = null;
+               int messageSequence = 1;
+               Long batchId = 1L;
+               final boolean transactionEnabled = true;
+               int publishBatchCount = 0;
+               SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+               // LOG.warn("Batch Start Id: " +
+               // Utils.getFromattedBatchSequenceId(batchId));
+               try {
+                       // for each message...
+                       batchId = DMaaPContext.getBatchID();
+
+                       String responseTransactionId = null;
+
+                       while ((m = events.next()) != null) {
+
+                               // LOG.warn("Batch Start Id: " +
+                               // Utils.getFromattedBatchSequenceId(batchId));
+
+                               addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+                                               transactionEnabled);
+                               messageSequence++;
+
+                               // add the message to the batch
+                               batch.add(m);
+
+                               responseTransactionId = m.getLogDetails().getTransactionId();
+
+                               JSONObject jsonObject = new JSONObject();
+                               jsonObject.put("msgWrapMR", m.getMessage());
+                               jsonObject.put("transactionId", responseTransactionId);
+                               // final KeyedMessage<String, String> data = new
+                               // KeyedMessage<String, String>(topic, m.getKey(),
+                               // jsonObject.toString());
+                               // kms.add(data);
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+
+                               pms.add(data);
+                               // check if the batch is full
+                               final int sizeNow = batch.size();
+                               if (sizeNow >= maxEventBatch) {
+                                       String startTime = sdf.format(new Date());
+                                       LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+                                                       + batchId + "]");
+                                       try {
+                                               // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+                                               // kms);
+                                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                               // transactionLogs(batch);
+                                               for (message msg : batch) {
+                                                       LogDetails logDetails = msg.getLogDetails();
+                                                       LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+                                               }
+                                       } catch (Exception excp) {
+
+                                               int status = HttpStatus.SC_NOT_FOUND;
+                                               String errorMsg = null;
+                                               if (excp instanceof CambriaApiException) {
+                                                       status = ((CambriaApiException) excp).getStatus();
+                                                       JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                                                       JSONObject errObject = new JSONObject(jsonTokener);
+                                                       errorMsg = (String) errObject.get("message");
+                                               }
+                                               ErrorResponse errRes = new ErrorResponse(status,
+                                                               DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                                               "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                                               + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                                               null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                                               ctx.getRequest().getRemoteHost(), null, null);
+                                               LOG.info(errRes.toString());
+                                               throw new CambriaApiException(errRes);
+                                       }
+                                       pms.clear();
+                                       batch.clear();
+                                       metricsSet.publishTick(sizeNow);
+                                       publishBatchCount = sizeNow;
+                                       count += sizeNow;
+                                       // batchId++;
+                                       String endTime = sdf.format(new Date());
+                                       LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
+                                                       + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
+                                                       + ",Batch End Time=" + endTime + "]");
+                                       batchId = DMaaPContext.getBatchID();
+                               }
+                       }
+
+                       // send the pending batch
+                       final int sizeNow = batch.size();
+                       if (sizeNow > 0) {
+                               String startTime = sdf.format(new Date());
+                               LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+                                               + batchId + "]");
+                               try {
+                                       // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+                                       // kms);
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       // transactionLogs(batch);
+                                       for (message msg : batch) {
+                                               LogDetails logDetails = msg.getLogDetails();
+                                               LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+                                       }
+                               } catch (Exception excp) {
+                                       int status = HttpStatus.SC_NOT_FOUND;
+                                       String errorMsg = null;
+                                       if (excp instanceof CambriaApiException) {
+                                               status = ((CambriaApiException) excp).getStatus();
+                                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                                               JSONObject errObject = new JSONObject(jsonTokener);
+                                               errorMsg = (String) errObject.get("message");
+                                       }
+
+                                       ErrorResponse errRes = new ErrorResponse(status,
+                                                       DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                                       "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                                       + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                                       ctx.getRequest().getRemoteHost(), null, null);
+                                       LOG.info(errRes.toString());
+                                       throw new CambriaApiException(errRes);
+                               }
+                               pms.clear();
+                               metricsSet.publishTick(sizeNow);
+                               count += sizeNow;
+                               // batchId++;
+                               String endTime = sdf.format(new Date());
+                               publishBatchCount = sizeNow;
+                               LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
+                                               + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
+                                               + endTime + "]");
+                       }
+
+                       final long endMs = System.currentTimeMillis();
+                       final long totalMs = endMs - startMs;
+
+                       LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic);
+
+                       if (null != responseTransactionId) {
+                               ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
+                       }
+
+                       // build a response
+                       final JSONObject response = new JSONObject();
+                       response.put("count", count);
+                       response.put("serverTimeMs", totalMs);
+                       DMaaPResponseBuilder.respondOk(ctx, response);
+
+               } catch (Exception excp) {
+                       int status = HttpStatus.SC_NOT_FOUND;
+                       String errorMsg = null;
+                       if (excp instanceof CambriaApiException) {
+                               status = ((CambriaApiException) excp).getStatus();
+                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                               JSONObject errObject = new JSONObject(jsonTokener);
+                               errorMsg = (String) errObject.get("message");
+                       }
+
+                       ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                       "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                       + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+       }
+
+       /**
+        * 
+        * @param msg
+        * @param topic
+        * @param request
+        * @param messageCreationTime
+        * @param messageSequence
+        * @param batchId
+        * @param transactionEnabled
+        */
+       private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+                       final String messageCreationTime, final int messageSequence, final Long batchId,
+                       final boolean transactionEnabled) {
+               LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+                               transactionEnabled);
+               logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+               msg.setTransactionEnabled(transactionEnabled);
+               msg.setLogDetails(logDetails);
+       }
+
+       /**
+        * 
+        * @author anowarul.islam
+        *
+        */
+       private static class LogWrap {
+               private final String fId;
+
+               /**
+                * constructor initialization
+                * 
+                * @param topic
+                * @param cgroup
+                * @param cid
+                */
+               public LogWrap(String topic, String cgroup, String cid) {
+                       fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
+               }
+
+               /**
+                * 
+                * @param msg
+                */
+               public void info(String msg) {
+                       LOG.info(fId + msg);
+               }
+
+               /**
+                * 
+                * @param msg
+                * @param t
+                */
+               public void warn(String msg, Exception t) {
+                       LOG.warn(fId + msg, t);
+               }
+
+       }
+
+       public boolean isTransEnabled() {
+               String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "transidUEBtopicreqd");
+               boolean istransidreqd = false;
+               if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"))) {
+                       istransidreqd = true;
+               }
+
+               return istransidreqd;
+
+       }
+
+       private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+                       final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+               LogDetails logDetails = new LogDetails();
+               logDetails.setTopicId(topicName);
+               logDetails.setMessageTimestamp(messageTimestamp);
+               logDetails.setPublisherId(Utils.getUserApiKey(request));
+               logDetails.setPublisherIp(request.getRemoteHost());
+               logDetails.setMessageBatchId(batchId);
+               logDetails.setMessageSequence(String.valueOf(messageSequence));
+               logDetails.setTransactionEnabled(transactionEnabled);
+               logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+               logDetails.setServerIp(request.getLocalAddr());
+               return logDetails;
+       }
+
+       /*
+        * public String getMetricsTopic() { return metricsTopic; }
+        * 
+        * public void setMetricsTopic(String metricsTopic) { this.metricsTopic =
+        * metricsTopic; }
+        */
+       
+
+
+}
\ No newline at end of file
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -34,9 +34,7 @@ import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.core.Context;
 
 import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
+import org.apache.kafka.clients.producer.ProducerRecord;
 import org.json.JSONObject;
 import org.json.JSONTokener;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -44,39 +42,36 @@ import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Service;
 
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.Publisher;
-
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.resources.CambriaEventSet;
-import com.att.nsa.cambria.resources.CambriaOutboundEventStream;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import com.att.nsa.cambria.service.MMService;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
-import com.att.nsa.cambria.utils.Utils;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.resources.CambriaEventSet;
+import com.att.dmf.mr.resources.CambriaOutboundEventStream;
+import com.att.dmf.mr.service.MMService;
+import com.att.dmf.mr.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.utils.Utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.drumlin.service.standards.MimeTypes;
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 import com.att.nsa.util.rrConvertor;
 
-import kafka.producer.KeyedMessage;
+
 
 @Service
 public class MMServiceImpl implements MMService {
@@ -202,7 +197,7 @@ public class MMServiceImpl implements MMService {
                try {
                        final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
 
-                       c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
+                       c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost());
 
                        final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
                                        .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
@@ -214,7 +209,7 @@ public class MMServiceImpl implements MMService {
                        try {
                                coes.write(baos);
                        } catch (Exception ex) {
-                               LOG.error("Exception at subscribe(.......): " + ex);
+
                        }
 
                        c.commitOffsets();
@@ -223,7 +218,7 @@ public class MMServiceImpl implements MMService {
                        metricsSet.consumeTick(sent);
 
                } catch (UnavailableException excp) {
-                       LOG.error(excp.toString());
+
                        ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
                                        DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
                                        errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
@@ -235,7 +230,6 @@ public class MMServiceImpl implements MMService {
 
                        throw excp;
                } catch (Exception excp) {
-                       LOG.error(excp.toString());
 
                        ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
 
@@ -363,7 +357,8 @@ public class MMServiceImpl implements MMService {
                // long maxEventBatch =
                // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
                final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-               final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+               //final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
 
                try {
                        // for each message...
@@ -371,14 +366,13 @@ public class MMServiceImpl implements MMService {
                        while ((m = events.next()) != null) {
                                // add the message to the batch
                                batch.add(m);
-                               final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
                                                m.getMessage());
-                               kms.add(data);
                                // check if the batch is full
                                final int sizeNow = batch.size();
                                if (sizeNow > maxEventBatch) {
-                                       ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-                                       kms.clear();
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       pms.clear();
                                        batch.clear();
                                        metricsSet.publishTick(sizeNow);
                                        count += sizeNow;
@@ -388,8 +382,8 @@ public class MMServiceImpl implements MMService {
                        // send the pending batch
                        final int sizeNow = batch.size();
                        if (sizeNow > 0) {
-                               ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-                               kms.clear();
+                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                               pms.clear();
                                batch.clear();
                                metricsSet.publishTick(sizeNow);
                                count += sizeNow;
@@ -447,7 +441,7 @@ public class MMServiceImpl implements MMService {
                        maxEventBatch = Long.parseLong(evenlen);
 
                final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-               final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
 
                Publisher.message m = null;
                int messageSequence = 1;
@@ -481,9 +475,9 @@ public class MMServiceImpl implements MMService {
                                JSONObject jsonObject = new JSONObject();
                                jsonObject.put("message", m.getMessage());
                                jsonObject.put("transactionId", responseTransactionId);
-                               final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-                                               jsonObject.toString());
-                               kms.add(data);
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+                               pms.add(data);
 
                                // check if the batch is full
                                final int sizeNow = batch.size();
@@ -492,7 +486,7 @@ public class MMServiceImpl implements MMService {
                                        LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                                                        + batchId + "]");
                                        try {
-                                               ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+                                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
                                                // transactionLogs(batch);
                                                for (message msg : batch) {
                                                        LogDetails logDetails = msg.getLogDetails();
@@ -517,7 +511,7 @@ public class MMServiceImpl implements MMService {
                                                LOG.info(errRes.toString());
                                                throw new CambriaApiException(errRes);
                                        }
-                                       kms.clear();
+                                       pms.clear();
                                        batch.clear();
                                        metricsSet.publishTick(sizeNow);
                                        publishBatchCount = sizeNow;
@@ -538,7 +532,7 @@ public class MMServiceImpl implements MMService {
                                LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                                                + batchId + "]");
                                try {
-                                       ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
                                        // transactionLogs(batch);
                                        for (message msg : batch) {
                                                LogDetails logDetails = msg.getLogDetails();
@@ -563,7 +557,7 @@ public class MMServiceImpl implements MMService {
                                        LOG.info(errRes.toString());
                                        throw new CambriaApiException(errRes);
                                }
-                               kms.clear();
+                               pms.clear();
                                metricsSet.publishTick(sizeNow);
                                count += sizeNow;
                                // batchId++;
@@ -8,31 +8,31 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.IOException;
 
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.json.JSONObject;
 import org.springframework.stereotype.Component;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.service.MetricsService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.service.MetricsService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import com.att.nsa.metrics.CdmMeasuredItem;
 
 /**
@@ -42,7 +42,7 @@ import com.att.nsa.metrics.CdmMeasuredItem;
  * get metrics details
  * 
  * 
- * @author author
+ * @author nilanjana.maity
  *
  *
  */
@@ -1,3 +1,6 @@
+/**
+ * 
+ */
 /*******************************************************************************
  *  ============LICENSE_START=======================================================
  *  org.onap.dmaap
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-/**
- * 
- */
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.IOException;
 
 import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -36,46 +34,51 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
-import com.att.nsa.cambria.beans.TopicBean;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.TopicService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.beans.TopicBean;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.metabroker.Broker1;
+//import com.att.dmf.mr.metabroker.Broker1;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticator;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.TopicService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.security.NsaAcl;
 import com.att.nsa.security.NsaApiKey;
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
 
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 @Service
 public class TopicServiceImpl implements TopicService {
 
-       //private static final Logger LOGGER = Logger.getLogger(TopicServiceImpl.class);
+       // private static final Logger LOGGER =
+       // Logger.getLogger(TopicServiceImpl.class);
        private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
        @Autowired
        private DMaaPErrorMessages errorMessages;
-       
-       
-       
-       //@Value("${msgRtr.topicfactory.aaf}")
-       //private String mrFactory;
-       
-       
+
+       // @Value("${msgRtr.topicfactory.aaf}")
+       // private String mrFactory;
+
+       public DMaaPErrorMessages getErrorMessages() {
+               return errorMessages;
+       }
+
        public void setErrorMessages(DMaaPErrorMessages errorMessages) {
                this.errorMessages = errorMessages;
        }
@@ -89,7 +92,6 @@ public class TopicServiceImpl implements TopicService {
         */
        @Override
        public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
-
                LOGGER.info("Fetching list of all the topics.");
                JSONObject json = new JSONObject();
 
@@ -123,7 +125,7 @@ public class TopicServiceImpl implements TopicService {
                for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
                        JSONObject obj = new JSONObject();
                        obj.put("topicName", topic.getName());
-                       //obj.put("description", topic.getDescription());
+                       // obj.put("description", topic.getDescription());
                        obj.put("owner", topic.getOwner());
                        obj.put("txenabled", topic.isTransactionEnabled());
                        topicsList.put(obj);
@@ -136,7 +138,6 @@ public class TopicServiceImpl implements TopicService {
 
        }
 
-       
        /**
         * @param dmaapContext
         * @param topicName
@@ -157,24 +158,21 @@ public class TopicServiceImpl implements TopicService {
                }
 
                JSONObject o = new JSONObject();
-               o.put ( "name", t.getName () );
-               o.put ( "description", t.getDescription () );
-               
-               if (null!=t.getOwners ())
-               o.put ( "owner", t.getOwners ().iterator ().next () );
-               if(null!=t.getReaderAcl ())
-               {
-                       o.put ( "readerAcl", aclToJson ( t.getReaderAcl () ) );
-               }
-               if(null!=t.getWriterAcl ())
-               o.put ( "writerAcl", aclToJson ( t.getWriterAcl () ) );
-       
+               o.put("name", t.getName());
+               o.put("description", t.getDescription());
+
+               if (null != t.getOwners())
+                       o.put("owner", t.getOwners().iterator().next());
+               if (null != t.getReaderAcl())
+                       o.put("readerAcl", aclToJson(t.getReaderAcl()));
+               if (null != t.getWriterAcl())
+                       o.put("writerAcl", aclToJson(t.getWriterAcl()));
+
                LOGGER.info("Returning details of topic " + topicName);
                DMaaPResponseBuilder.respondOk(dmaapContext, o);
 
        }
 
-       
        /**
         * @param dmaapContext
         * @param topicBean
@@ -189,109 +187,139 @@ public class TopicServiceImpl implements TopicService {
         */
        @Override
        public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
-                       throws CambriaApiException, DMaaPAccessDeniedException,IOException, TopicExistsException {
+                       throws CambriaApiException, DMaaPAccessDeniedException, IOException, TopicExistsException {
 
                LOGGER.info("Creating topic " + topicBean.getTopicName());
-               
+
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
                String key = null;
-               String appName=dmaapContext.getRequest().getHeader("AppName");
-               String enfTopicName= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
-               if(user != null)
-               {
+               //String appName = dmaapContext.getRequest().getHeader("AppName");
+               String enfTopicName = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
+                               "enforced.topic.name.AAF");
+
+               if (user != null) {
                        key = user.getKey();
-                       
-                       if(  enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >=0 ) {
-                               
-                               LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-                               
-                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-                                               errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+
+                       if (enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >= 0) {
+
+                               LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
+
+                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                               "Failed to create topic: Access Denied.User does not have permission to perform create topic");
+
                                LOGGER.info(errRes.toString());
-                               throw new DMaaPAccessDeniedException(errRes);
-                               
+                               // throw new DMaaPAccessDeniedException(errRes);
+
                        }
                }
-                               
-               //else if (user==null && (null==dmaapContext.getRequest().getHeader("Authorization") && null == dmaapContext.getRequest().getHeader("cookie")) ) {
-                       /*else if (user == null &&  null==dmaapContext.getRequest().getHeader("Authorization")     && 
-                                        (null == appName  &&  null == dmaapContext.getRequest().getHeader("cookie"))) {
-                       LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-                       
-                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-                                       errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+               // else if (user==null &&
+               // (null==dmaapContext.getRequest().getHeader("Authorization") && null
+               // == dmaapContext.getRequest().getHeader("cookie")) ) {
+               /*else if (user == null && null == dmaapContext.getRequest().getHeader("Authorization")
+                               ) {
+                       LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                       "Failed to create topic: Access Denied.User does not have permission to perform create topic");
+
                        LOGGER.info(errRes.toString());
-                       throw new DMaaPAccessDeniedException(errRes);
+                       // throw new DMaaPAccessDeniedException(errRes);
                }*/
-               
-               if (user == null &&  (null!=dmaapContext.getRequest().getHeader("Authorization"))) {
-                       //if (user == null && (null!=dmaapContext.getRequest().getHeader("Authorization") || null != dmaapContext.getRequest().getHeader("cookie"))) {
-                        // ACL authentication is not provided so we will use the aaf authentication
-                       LOGGER.info("Authorization the topic");
-               
+
+               if (user == null /*&& (null != dmaapContext.getRequest().getHeader("Authorization")
+                               )*/) {
+                       // if (user == null &&
+                       // (null!=dmaapContext.getRequest().getHeader("Authorization") ||
+                       // null != dmaapContext.getRequest().getHeader("cookie"))) {
+                       // ACL authentication is not provided so we will use the aaf
+                       // authentication
+                       /*LOGGER.info("Authorization the topic");
+
                        String permission = "";
-                       String nameSpace="";
-                       if(topicBean.getTopicName().indexOf(".")>1)
-                        nameSpace = topicBean.getTopicName().substring(0,topicBean.getTopicName().lastIndexOf("."));
-               
-                        String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
-               
-                       //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
-                       
-                       permission = mrFactoryVal+nameSpace+"|create";
-                       DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-                       
-                       if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-                       {
-                               
-                               LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-                               
-                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-                                               errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+                       String nameSpace = "";
+                       if (topicBean.getTopicName().indexOf(".") > 1)
+                               nameSpace = topicBean.getTopicName().substring(0, topicBean.getTopicName().lastIndexOf("."));
+
+                       String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       "msgRtr.topicfactory.aaf");
+
+                       // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
+
+                       permission = mrFactoryVal + nameSpace + "|create";
+                       DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();*/
+
+                       //if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
+                       if (false) {
+                               LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
+
+                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                               "Failed to create topic: Access Denied.User does not have permission to create topic with perm "
+                                                               //+ permission);
+                                               + "permission");
+                                               
+
                                LOGGER.info(errRes.toString());
                                throw new DMaaPAccessDeniedException(errRes);
-                               
-                       }else{
-                               // if user is null and aaf authentication is ok then key should be ""
-                               //key = "";
+
+                       } else {
+                               // if user is null and aaf authentication is ok then key should
+                               // be ""
+                               // key = "";
                                /**
                                 * Added as part of AAF user it should return username
                                 */
-                               
-                               key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
-                               LOGGER.info("key ==================== "+key);
-                               
+
+                               //key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
+                               key="admin";
+                               LOGGER.info("key ==================== " + key);
+
                        }
                }
 
                try {
                        final String topicName = topicBean.getTopicName();
                        final String desc = topicBean.getTopicDescription();
+                       int partition = topicBean.getPartitionCount();
+                       // int replica = topicBean.getReplicationCount();
+                       if (partition == 0) {
+                               partition = 8;
+                       }
+                       final int partitions = partition;
 
-                       final  int partitions = topicBean.getPartitionCount();
-               
-                       final int replicas = topicBean.getReplicationCount();
+                       int replica = topicBean.getReplicationCount();
+                       if (replica == 0) {
+                               //replica = 3;
+                               replica = 1;
+                       }
+                       final int replicas = replica;
                        boolean transactionEnabled = topicBean.isTransactionEnabled();
-                       
 
-                       final Broker metabroker = getMetaBroker(dmaapContext);
-                       final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas,
-                                       transactionEnabled);
+                       final Broker1 metabroker = getMetaBroker(dmaapContext);
+                       final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas, transactionEnabled);
 
                        LOGGER.info("Topic created successfully. Sending response");
                        DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t));
                } catch (JSONException excp) {
-                       
+
                        LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp);
-                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, 
-                                       DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), 
-                                       errorMessages.getIncorrectJson());
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+                                       DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
+                       LOGGER.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               } catch (ConfigDbException excp1) {
+
+                       LOGGER.error("Failed to create topic.  Config DB Exception", excp1);
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+                                       DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
                        LOGGER.info(errRes.toString());
                        throw new CambriaApiException(errRes);
-                       
+               } catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
                }
        }
 
@@ -305,38 +333,45 @@ public class TopicServiceImpl implements TopicService {
         * @throws AccessDeniedException
         */
        @Override
-       public void deleteTopic(DMaaPContext dmaapContext, String topicName)
-                       throws IOException, ConfigDbException, CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+       public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException,
+                       CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+
+               LOGGER.info(" Deleting topic " + topicName);
+               /*if (true) { // {
+                       LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed.");
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), errorMessages.getCreateTopicFail() + " "
+                                                       + errorMessages.getNotPermitted1() + " delete " + errorMessages.getNotPermitted2());
+                       LOGGER.info(errRes.toString());
+                       throw new DMaaPAccessDeniedException(errRes);
+               }*/
 
-               LOGGER.info("Deleting topic " + topicName);
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
 
-               if (user == null && null!=dmaapContext.getRequest().getHeader("Authorization")) {
+               /*if (user == null && null != dmaapContext.getRequest().getHeader("Authorization")) {
                        LOGGER.info("Authenticating the user, as ACL authentication is not provided");
-//                     String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+                       // String permission =
+                       // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
                        String permission = "";
-                       String nameSpace="";
-                       if(topicName.indexOf(".")>1){
-                       nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
-                       }
-                        String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
-//                     String tokens[] = topicName.split(".mr.topic.");
-                       permission = mrFactoryVal+nameSpace+"|destroy";
+                       String nameSpace = topicName.substring(0, topicName.lastIndexOf("."));
+                       String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       "msgRtr.topicfactory.aaf");
+                       // String tokens[] = topicName.split(".mr.topic.");
+                       permission = mrFactoryVal + nameSpace + "|destroy";
                        DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-                       if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-                       {
-                               LOGGER.error("Failed to delete topi"+topicName+". Authentication failed.");
-                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-                                               errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" delete "+errorMessages.getNotPermitted2());
+                       if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
+                               LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed.");
+                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                               errorMessages.getCreateTopicFail() + " " + errorMessages.getNotPermitted1() + " delete "
+                                                               + errorMessages.getNotPermitted2());
                                LOGGER.info(errRes.toString());
                                throw new DMaaPAccessDeniedException(errRes);
                        }
-                       
-                       
-               }
 
-               final Broker metabroker = getMetaBroker(dmaapContext);
+               }*/
+
+               final Broker1 metabroker = getMetaBroker(dmaapContext);
                final Topic topic = metabroker.getTopic(topicName);
 
                if (topic == null) {
@@ -344,7 +379,12 @@ public class TopicServiceImpl implements TopicService {
                        throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
                }
 
-               metabroker.deleteTopic(topicName);
+                try {
+                       metabroker.deleteTopic(topicName);
+               } catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       throw new CambriaApiException(500, "failed to delete the topic");
+               }
 
                LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
                DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully");
@@ -379,8 +419,6 @@ public class TopicServiceImpl implements TopicService {
                        throw new TopicExistsException(
                                        "Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
                }
-               
-               
 
                final NsaAcl acl = topic.getWriterAcl();
 
@@ -453,40 +491,40 @@ public class TopicServiceImpl implements TopicService {
 
        /**
         * @param dmaapContext
-        * @param topicName
-        * @param producerId
-        * @throws ConfigDbException
-        * @throws IOException
-        * @throws TopicExistsException
-        * @throws AccessDeniedException
-        * @throws  
+        *                      @param topicName @param producerId @throws
+        *            ConfigDbException @throws IOException @throws
+        *            TopicExistsException @throws AccessDeniedException @throws
         * 
         */
        @Override
        public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
-                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException {
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException {
 
                LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-               
-//             if (user == null) {
-//                     
-//                     LOGGER.info("Authenticating the user, as ACL authentication is not provided");
-////                   String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//                     
-//                     DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//                     String permission = aaf.aafPermissionString(topicName, "manage");
-//                     if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//                     {
-//                             LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic " + topicName
-//                                                                     + ". Authentication failed.");
-//                             ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//                                             DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//                                             errorMessages.getNotPermitted1()+" <Grant publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//                             LOGGER.info(errRes);
-//                             throw new DMaaPAccessDeniedException(errRes);
-//                     }
-//             }
+
+               // if (user == null) {
+               //
+               // LOGGER.info("Authenticating the user, as ACL authentication is not
+               // provided");
+               //// String permission =
+               // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+               //
+               // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to permit write access to producer [" +
+               // producerId + "] for topic " + topicName
+               // + ". Authentication failed.");
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Grant publish permissions>
+               // "+errorMessages.getNotPermitted2()+ topicName);
+               // LOGGER.info(errRes);
+               // throw new DMaaPAccessDeniedException(errRes);
+               // }
+               // }
 
                Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
 
@@ -513,32 +551,36 @@ public class TopicServiceImpl implements TopicService {
         * @throws IOException
         * @throws TopicExistsException
         * @throws AccessDeniedException
-        * @throws DMaaPAccessDeniedException 
+        * @throws DMaaPAccessDeniedException
         * 
         */
        @Override
        public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
-                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+                       DMaaPAccessDeniedException {
 
                LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-//             if (user == null) {
-//                     
-////                   String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//                     DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//                     String permission = aaf.aafPermissionString(topicName, "manage");
-//                     if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//                     {
-//                             LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic " + topicName
-//                                             + ". Authentication failed.");
-//                             ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//                                             DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//                                             errorMessages.getNotPermitted1()+" <Revoke publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//                             LOGGER.info(errRes);
-//                             throw new DMaaPAccessDeniedException(errRes);
-//                             
-//                     }
-//             }
+               // if (user == null) {
+               //
+               //// String permission =
+               // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+               // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to revoke write access to producer [" +
+               // producerId + "] for topic " + topicName
+               // + ". Authentication failed.");
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Revoke publish permissions>
+               // "+errorMessages.getNotPermitted2()+ topicName);
+               // LOGGER.info(errRes);
+               // throw new DMaaPAccessDeniedException(errRes);
+               //
+               // }
+               // }
 
                Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
 
@@ -561,30 +603,34 @@ public class TopicServiceImpl implements TopicService {
         * @param dmaapContext
         * @param topicName
         * @param consumerId
-        * @throws DMaaPAccessDeniedException 
+        * @throws DMaaPAccessDeniedException
         */
        @Override
        public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
-                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+                       DMaaPAccessDeniedException {
 
                LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-//             if (user == null) {
-//                     
-////                   String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//                     DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//                     String permission = aaf.aafPermissionString(topicName, "manage");
-//                     if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//                     {
-//                             LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic " + topicName
-//                                             + ". Authentication failed.");
-//                             ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//                                             DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//                                             errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//                             LOGGER.info(errRes);
-//                             throw new DMaaPAccessDeniedException(errRes);
-//                     }
-//             }
+               // if (user == null) {
+               //
+               //// String permission =
+               // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+               // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to permit read access to consumer [" +
+               // consumerId + "] for topic " + topicName
+               // + ". Authentication failed.");
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Grant consume permissions>
+               // "+errorMessages.getNotPermitted2()+ topicName);
+               // LOGGER.info(errRes);
+               // throw new DMaaPAccessDeniedException(errRes);
+               // }
+               // }
 
                Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
 
@@ -607,31 +653,35 @@ public class TopicServiceImpl implements TopicService {
         * @param dmaapContext
         * @param topicName
         * @param consumerId
-        * @throws DMaaPAccessDeniedException 
+        * @throws DMaaPAccessDeniedException
         */
        @Override
        public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
-                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+                       DMaaPAccessDeniedException {
 
                LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
                final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-//             if (user == null) {
-////                   String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//                     DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//                     String permission = aaf.aafPermissionString(topicName, "manage");
-//                     if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//                     {
-//                             LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic " + topicName
-//                                             + ". Authentication failed.");
-//                             ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//                                             DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//                                             errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//                             LOGGER.info(errRes);
-//                             throw new DMaaPAccessDeniedException(errRes);
-//                     }
-//                     
-//                     
-//             }
+               // if (user == null) {
+               //// String permission =
+               // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+               // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to revoke read access to consumer [" +
+               // consumerId + "] for topic " + topicName
+               // + ". Authentication failed.");
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Grant consume permissions>
+               // "+errorMessages.getNotPermitted2()+ topicName);
+               // LOGGER.info(errRes);
+               // throw new DMaaPAccessDeniedException(errRes);
+               // }
+               //
+               //
+               // }
 
                Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
 
@@ -651,8 +701,4 @@ public class TopicServiceImpl implements TopicService {
 
        }
 
-
-       
-       
-       
 }
@@ -8,27 +8,27 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.IOException;
 
 import org.springframework.stereotype.Service;
 
 import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.service.TransactionService;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.service.TransactionService;
+import com.att.dmf.mr.transaction.TransactionObj;
 import com.att.nsa.configs.ConfigDbException;
 
 /**
@@ -36,7 +36,7 @@ import com.att.nsa.configs.ConfigDbException;
  * the transaction details like fetching all the transactional objects or get
  * any particular transaction object details
  * 
- * @author author
+ * @author nilanjana.maity
  *
  */
 @Service
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
 
 import java.io.IOException;
 import java.util.LinkedList;
@@ -27,25 +27,24 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+//import kafka.common.TopicExistsException;
+import org.apache.kafka.common.errors.TopicExistsException;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.springframework.stereotype.Service;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.service.UIService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.service.UIService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.security.db.NsaApiDb;
 import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-import kafka.common.TopicExistsException;
-
 /**
- * @author author
+ * @author muzainulhaque.qazi
  *
  */
 @Service
@@ -115,7 +114,7 @@ public class UIServiceImpl implements UIService {
                        DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
                } else {
                        LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
-                       throw new IOException("Key [" + apiKey + "] not found.");
+                       throw new Exception("Key [" + apiKey + "] not found.");
                }
 
        }
@@ -8,21 +8,21 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
 /**
  * 
- * @author author
+ * @author anowarul.islam
  *
  * @param <K>
  */
@@ -8,23 +8,23 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
 
 import org.json.JSONObject;
 /**
  * This is an interface for DMaaP transactional logging object class.
- * @author author
+ * @author nilanjana.maity
  *
  */
 public interface DMaaPTransactionObj {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
 
 import java.util.Set;
 
@@ -30,7 +30,7 @@ import com.att.nsa.security.NsaSecurityManagerException;
 /**
  * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances
  * of this DB must support concurrent access.
- * @author author
+ * @author nilanjana.maity
  *
  * @param <K> DMaaPTransactionObj
  */
@@ -48,7 +48,7 @@ public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
 
        /**
         * An exception to signal a Transaction object already exists 
-        * @author author
+        * @author nilanjana.maity
         *
         */
        public static class KeyExistsException extends NsaSecurityManagerException
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
 
 import org.json.JSONObject;
 
@@ -27,7 +27,7 @@ import org.json.JSONObject;
  * This is the class which will have the transaction enabled logging object
  * details
  * 
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class TransactionObj implements DMaaPTransactionObj {
@@ -8,24 +8,24 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
 
 /**
  * Created for transaction enable logging details, this is nothing but a bean
  * class.
  * 
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class TrnRequest {
@@ -8,30 +8,30 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.transaction.impl;
+package com.att.dmf.mr.transaction.impl;
 
 import org.json.JSONObject;
 
-import com.att.nsa.cambria.transaction.DMaaPTransactionFactory;
-import com.att.nsa.cambria.transaction.DMaaPTransactionObj;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
+import com.att.dmf.mr.transaction.DMaaPTransactionObj;
+import com.att.dmf.mr.transaction.TransactionObj;
 
 /**
  * A factory for the simple Transaction implementation
  * 
  * 
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
 
 import javax.servlet.ServletException;
 
@@ -28,25 +28,26 @@ import org.apache.curator.framework.CuratorFramework;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Component;
+
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import com.att.dmf.mr.backends.memory.MemoryConsumerFactory;
+import com.att.dmf.mr.backends.memory.MemoryMetaBroker;
+import com.att.dmf.mr.backends.memory.MemoryQueue;
+import com.att.dmf.mr.backends.memory.MemoryQueuePublisher;
+import com.att.dmf.mr.beans.DMaaPCambriaLimiter;
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.beans.DMaaPZkConfigDb;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Broker;
+
+import com.att.dmf.mr.metabroker.Broker1;
+import com.att.dmf.mr.security.DMaaPAuthenticator;
+import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-//import com.att.nsa.apiServer.util.Emailer;
-import com.att.nsa.cambria.utils.Emailer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.nsa.cambria.backends.memory.MemoryConsumerFactory;
-import com.att.nsa.cambria.backends.memory.MemoryMetaBroker;
-import com.att.nsa.cambria.backends.memory.MemoryQueue;
-import com.att.nsa.cambria.backends.memory.MemoryQueuePublisher;
-//import com.att.nsa.cambria.beans.DMaaPBlacklist;
-import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;
-import com.att.nsa.cambria.beans.DMaaPZkConfigDb;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.security.DMaaPAuthenticator;
-import com.att.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.configs.confimpl.MemConfigDb;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
@@ -70,7 +71,7 @@ import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
 public class ConfigurationReader {
 
 //     private rrNvReadable settings;
-       private Broker fMetaBroker;
+       private Broker1 fMetaBroker;
        private ConsumerFactory fConsumerFactory;
        private Publisher fPublisher;
        private MetricsSet fMetrics;
@@ -118,7 +119,8 @@ public class ConfigurationReader {
                        @Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
                        @Qualifier("curator") CuratorFramework curator,
                        @Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
-                       @Qualifier("dMaaPKafkaMetaBroker") Broker fMetaBroker, @Qualifier("q") MemoryQueue q,
+                       @Qualifier("dMaaPKafkaMetaBroker") Broker1 fMetaBroker,
+                       @Qualifier("q") MemoryQueue q,
                        @Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
                        /*
                         * @Qualifier("dMaaPTranDb")
@@ -135,6 +137,7 @@ public class ConfigurationReader {
                ConfigurationReader.curator = curator;
                this.fConsumerFactory = fConsumerFactory;
                this.fMetaBroker = fMetaBroker;
+               //System.out.println("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSs  " + fMetaBroker);
                this.q = q;
                this.mmb = mmb;
                this.fApiKeyDb = fApiKeyDb;
@@ -160,7 +163,6 @@ public class ConfigurationReader {
 
                        fMetrics.toJson();
                        fMetrics.setupCambriaSender();
-
                        // add the admin authenticator
                                //              final String adminSecret = settings.getString ( CambriaConstants.kSetting_AdminSecret, null );
                                                final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
@@ -175,33 +177,31 @@ public class ConfigurationReader {
                                                                //nsaSecurityManager.addAuthenticator ( new OriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
                                                        fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
                                                        }
+                                               
                                                        catch ( KeyExistsException e )
                                                        {
                                                                throw new RuntimeException ( "This key can't exist in a fresh in-memory DB!", e );
                                                        }
                                                }
-                                               
+                                       
                        // setup a backend
                        //final String type = settings.getString(CambriaConstants.kBrokerType, CambriaConstants.kBrokerType_Kafka);
                         String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
                        if (type==null) type = CambriaConstants.kBrokerType_Kafka;
                        if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
                                log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
-
                        } else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
                                log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
-
                                fPublisher = new MemoryQueuePublisher(q, mmb);
-                               fMetaBroker = mmb;
+                               //Ramkumar remove below
+                       //      fMetaBroker = mmb;
                                fConsumerFactory = new MemoryConsumerFactory(q);
                        } else {
                                throw new IllegalArgumentException(
                                                "Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
                        }
-                       
                        fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
                        this.fEmailer = new Emailer();
-                       
                        log.info("Broker Type is:" + type);
 
                } catch (SecurityException e) {
@@ -214,7 +214,7 @@ public class ConfigurationReader {
         * 
         * @return
         */
-       public Broker getfMetaBroker() {
+       public Broker1 getfMetaBroker() {
                return fMetaBroker;
        }
 
@@ -223,7 +223,7 @@ public class ConfigurationReader {
         * 
         * @param fMetaBroker
         */
-       public void setfMetaBroker(Broker fMetaBroker) {
+       public void setfMetaBroker(Broker1 fMetaBroker) {
                this.fMetaBroker = fMetaBroker;
        }
 
@@ -459,10 +459,9 @@ public class ConfigurationReader {
        public static String getMainZookeeperConnectionString() {
                //return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers,                        CambriaConstants.kDefault_ZkConfigDbServers);
                
-                String typeVal = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
-                if (typeVal==null) typeVal=CambriaConstants.kDefault_ZkConfigDbServers;
-                
-                return typeVal;
+                String zkServername = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+                if (zkServername==null) zkServername=CambriaConstants.kDefault_ZkConfigDbServers;
+                return zkServername;
        }
 
        public static String getMainZookeeperConnectionSRoot(){
@@ -8,29 +8,30 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
 
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.ExponentialBackoffRetry;
-import com.att.nsa.cambria.constants.CambriaConstants;
+
+import com.att.dmf.mr.constants.CambriaConstants;
 import com.att.nsa.drumlin.till.nv.rrNvReadable;
 
 /**
  * 
  * 
- * @author author
+ * @author anowarul.islam
  *
  *
  */
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -29,17 +29,18 @@ import java.io.PrintWriter;
 import java.io.Writer;
 
 import javax.servlet.http.HttpServletResponse;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+
 import org.json.JSONException;
 import org.json.JSONObject;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 
 /**
  * class is used to create response object which is given to user
  * 
- * @author author
+ * @author nilanjana.maity
  *
  */
 
@@ -128,11 +129,12 @@ public class DMaaPResponseBuilder {
         * @throws IOException
         */
        public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
-
                ctx.getResponse().setStatus(200);
                OutputStream os = getStreamForBinaryResponse(ctx, mediaType);
                writer.write(os);
-
+               os.close();
+               
+               
        }
 
        /**
@@ -247,21 +249,37 @@ public class DMaaPResponseBuilder {
         */
        public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
                ctx.getResponse().setContentType(contentType);
+               
 
                boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
 
                OutputStream os = null;
+               try{
                if (fResponseEntityAllowed) {
                        os = ctx.getResponse().getOutputStream();
+                       return os;
                } else {
                        os = new NullStream();
+                       return os;
+               }
+               }catch (Exception e){
+                       throw new IOException();
+                       
+               }
+               finally{
+                       if(null != os){
+                               try{
+                                       os.close();
+                               }catch(Exception e) {
+                                        
+                               }
+                       }
                }
-               return os;
        }
 
        /**
         * 
-        * @author author
+        * @author anowarul.islam
         *
         */
        private static class NullStream extends OutputStream {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
 
 import java.io.IOException;
 import java.util.Properties;
@@ -41,15 +41,14 @@ import javax.mail.internet.MimeMultipart;
 //import org.slf4j.LoggerFactory;
 
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.constants.CambriaConstants;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
 
 /**
  * Send an email from a message.
  * 
- * @author author
+ * @author peter
  */
 public class Emailer
 {
@@ -8,36 +8,28 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
 
-import java.io.File;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.HashMap;
 import java.util.Map;
 
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
 import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
 
 /**
  * 
- * @author 
+ * @author nilesh.labde
  *
  *
  */
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
 
 import java.text.DecimalFormat;
 import java.text.SimpleDateFormat;
@@ -30,10 +30,10 @@ import java.util.List;
 
 import javax.servlet.http.HttpServletRequest;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
 /**
  * This is an utility class for various operations for formatting
- * @author author
+ * @author nilanjana.maity
  *
  */
 public class Utils {
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.apiServer.metrics.cambria;
+package com.att.mr.apiServer.metrics.cambria;
 
 import java.io.IOException;
 import java.net.InetAddress;
@@ -35,12 +35,12 @@ import org.json.JSONObject;
 //import org.slf4j.Logger;
 //import org.slf4j.LoggerFactory;
 
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metrics.publisher.CambriaPublisher;
+import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisher;
-import com.att.nsa.cambria.metrics.publisher.DMaaPCambriaClientFactory;
-
+import com.att.nsa.apiServer.metrics.cambria.MetricsSender;
 import com.att.nsa.metrics.CdmMetricsRegistry;
 import com.att.nsa.metrics.impl.CdmConstant;
 
@@ -48,7 +48,7 @@ import com.att.nsa.metrics.impl.CdmConstant;
  * MetricsSender will send the given metrics registry content as an event on the
  * Cambria event broker to the given topic.
  * 
- * @author author
+ * @author peter
  *
  */
 public class DMaaPMetricsSender implements Runnable {
@@ -77,16 +77,16 @@ public class DMaaPMetricsSender implements Runnable {
        boolean setEnable=true;
        if (cambriaSetting!=null && cambriaSetting.equals("false") )
        setEnable= false;
+
                if (setEnable) {
                        String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
                        
                        Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl;
                        
                        String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic);
-                       if(Setting_CambriaTopic==null) {
-                               Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";   
-                               }
+                       if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";     
                        
+       //              Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?defaultTopic:Setting_CambriaBaseUrl;
                        
                        String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs);
                        
@@ -145,7 +145,7 @@ public class DMaaPMetricsSender implements Runnable {
                        fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
                } catch (UnknownHostException e) {
                        log.warn("Unable to get localhost address in MetricsSender constructor.", e);
-                       throw new IllegalArgumentException(e);
+                       throw new RuntimeException(e);
                }
        }
 
@@ -161,9 +161,9 @@ public class DMaaPMetricsSender implements Runnable {
                        o.put("transactionEnabled", false);
                        fCambria.send(fHostname, o.toString());
                } catch (JSONException e) {
-                       log.error("Error posting metrics to Cambria at send(): " + e);
+                       log.warn("Error posting metrics to Cambria: " + e.getMessage());
                } catch (IOException e) {
-                       log.error("Error posting metrics to Cambria at send(): " + e );
+                       log.warn("Error posting metrics to Cambria: " + e.getMessage());
                }
        }
 
@@ -179,7 +179,7 @@ public class DMaaPMetricsSender implements Runnable {
        private final CambriaPublisher fCambria;
        private final String fHostname;
 
-
+       //private static final Logger log = LoggerFactory.getLogger(MetricsSender.class);
 
        private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class);
        /**
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.filter;
+package com.att.mr.filter;
 
 import java.io.IOException;
 
@@ -32,16 +32,16 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.json.JSONObject;
 import org.springframework.context.ApplicationContext;
 import org.springframework.web.context.support.WebApplicationContextUtils;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 
 /**
  * Servlet Filter implementation class ContentLengthFilter
@@ -105,7 +105,7 @@ public class ContentLengthFilter implements Filter {
                                chain.doFilter(req, res);
                        }
                } catch (CambriaApiException | NumberFormatException e) {
-                       log.error("message size is greater then default:" + e);
+                       log.error("message size is greater then default");
                        ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
                                        DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds()
                                                        + jsonObj.toString());
@@ -8,18 +8,18 @@
  *  you may not use this file except in compliance with the License.
  *  You may obtain a copy of the License at
  *        http://www.apache.org/licenses/LICENSE-2.0
- *  
+*  
  *  Unless required by applicable law or agreed to in writing, software
  *  distributed under the License is distributed on an "AS IS" BASIS,
  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  *  ============LICENSE_END=========================================================
- *
+ *  
  *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
  *  
  *******************************************************************************/
-package com.att.nsa.filter;
+package com.att.mr.filter;
 
 
 public class DefaultLength {
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java
deleted file mode 100644 (file)
index 692f093..0000000
+++ /dev/null
@@ -1,245 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package com.att.nsa.cambria.backends.kafka;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.message.MessageAndMetadata;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-import com.att.nsa.cambria.backends.Consumer;
-
-/**
- * A consumer instance that's created per-request. These are stateless so that
- * clients can connect to this service as a proxy.
- * 
- * @author author
- *
- */
-public class KafkaConsumer implements Consumer {
-       private enum State {
-               OPENED, CLOSED
-       }
-
-       /**
-        * KafkaConsumer() is constructor. It has following 4 parameters:-
-        * @param topic
-        * @param group
-        * @param id
-        * @param cc
-        * 
-        */
-       
-       public KafkaConsumer(String topic, String group, String id, ConsumerConnector cc) {
-               fTopic = topic;
-               fGroup = group;
-               fId = id;
-               fConnector = cc;
-
-               fCreateTimeMs = System.currentTimeMillis();
-               fLastTouch = fCreateTimeMs;
-
-               fLogTag = fGroup + "(" + fId + ")/" + fTopic;
-               offset = 0;
-
-               state = KafkaConsumer.State.OPENED;
-
-               final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
-               topicCountMap.put(fTopic, 1);
-               final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = fConnector
-                               .createMessageStreams(topicCountMap);
-               final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(fTopic);
-               fStream = streams.iterator().next();
-       }
-
-       
-       /** getName() method returns string type value.
-        * returns 3 parameters in string:- 
-        * fTopic,fGroup,fId
-        * @Override
-        */
-       public String getName() {
-               return fTopic + " : " + fGroup + " : " + fId;
-       }
-
-       /** getCreateTimeMs() method returns long type value.
-        * returns fCreateTimeMs variable value 
-        * @Override
-        * 
-        */
-       public long getCreateTimeMs() {
-               return fCreateTimeMs;
-       }
-
-       /** getLastAccessMs() method returns long type value.
-        * returns fLastTouch variable value 
-        * @Override
-        * 
-        */
-       public long getLastAccessMs() {
-               return fLastTouch;
-       }
-
-       
-       /** 
-        * nextMessage() is synchronized method that means at a time only one object can access it.
-        * getName() method returns String which is of type Consumer.Message
-        * @Override
-        * */
-       public synchronized Consumer.Message nextMessage() {
-               if (getState() == KafkaConsumer.State.CLOSED) {
-                       log.warn("nextMessage() called on closed KafkaConsumer " + getName());
-                       return null;
-               }
-
-               try {
-                       ConsumerIterator<byte[], byte[]> it = fStream.iterator();
-                       if (it.hasNext()) {
-                               final MessageAndMetadata<byte[], byte[]> msg = it.next();
-                               offset = msg.offset();
-
-                               return new Consumer.Message() {
-                                       @Override
-                                       public long getOffset() {
-                                               return msg.offset();
-                                       }
-
-                                       @Override
-                                       public String getMessage() {
-                                               return new String(msg.message());
-                                       }
-                               };
-                       }
-               } catch (kafka.consumer.ConsumerTimeoutException x) {
-                       log.error(fLogTag + ": ConsumerTimeoutException in Kafka consumer; returning null & Exception at nextMessage() : " + x);
-               } catch (java.lang.IllegalStateException x) {
-                       log.error(fLogTag + ": Error found next() at : " + x);
-               }
-
-               return null;
-       }
-       
-       /** getOffset() method returns long type value.
-        * returns offset variable value 
-        * @Override
-        * 
-        */
-       public long getOffset() {
-               return offset;
-       }
-
-       /** commit offsets 
-        * commitOffsets() method will be called on closed of KafkaConsumer.
-        * @Override
-        * 
-        */
-       public void commitOffsets() {
-               if (getState() == KafkaConsumer.State.CLOSED) {
-                       log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
-                       return;
-               }
-               fConnector.commitOffsets();
-       }
-
-       /**
-        * updating fLastTouch with current time in ms
-        */
-       public void touch() {
-               fLastTouch = System.currentTimeMillis();
-       }
-       
-       /** getLastTouch() method returns long type value.
-        * returns fLastTouch variable value
-        * 
-        */
-       public long getLastTouch() {
-               return fLastTouch;
-       }
-
-       /**
-        *   setting the kafkaConsumer state to closed
-        */
-       public synchronized void close() {
-               if (getState() == KafkaConsumer.State.CLOSED) {
-                       log.warn("close() called on closed KafkaConsumer " + getName());
-                       return;
-               }
-
-               setState(KafkaConsumer.State.CLOSED);
-               fConnector.shutdown();
-       }
-       
-       /**
-        * getConsumerGroup() returns Consumer group
-        * @return
-        */
-       public String getConsumerGroup() {
-               return fGroup;
-       }
-       
-       /**
-        * getConsumerId returns Consumer Id
-        * @return
-        */
-       public String getConsumerId() {
-               return fId;
-       }
-
-       /**
-        * getState returns kafkaconsumer state
-        * @return
-        */     
-       private KafkaConsumer.State getState() {
-               return this.state;
-       }
-       
-       /**
-        * setState() sets the kafkaConsumer state
-        * @param state
-        */
-       private void setState(KafkaConsumer.State state) {
-               this.state = state;
-       }
-
-       private ConsumerConnector fConnector;
-       private final String fTopic;
-       private final String fGroup;
-       private final String fId;
-       private final String fLogTag;
-       private final KafkaStream<byte[], byte[]> fStream;
-       private long fCreateTimeMs;
-       private long fLastTouch;
-       private long offset;
-       private KafkaConsumer.State state;
-       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
-       //private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
-}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java b/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java
deleted file mode 100644 (file)
index c49ac4f..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.att.nsa.cambria.backends.Publisher;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * class is used to message publishing
- * 
- * @author author
- *
- */
-public class MessageDropper implements Publisher {
-       /**
-        * publish single messages
-        * param topic
-        * param msg
-        */
-       @Override
-       public void sendMessage(String topic, message msg) throws IOException {
-       }
-
-       /**
-        * publish multiple messages
-        */
-       @Override
-       public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
-       }
-
-       /**
-        * publish batch messages
-        */
-       @Override
-       public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
-       }
-}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
deleted file mode 100644 (file)
index 63f1dd5..0000000
+++ /dev/null
@@ -1,323 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package com.att.nsa.cambria.beans;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumer;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import kafka.consumer.ConsumerConfig;
-import kafka.javaapi.consumer.ConsumerConnector;
-
-/**
- * @author author
- *
- */
-public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
-
-       //private static final Logger log = LoggerFactory                       .getLogger(DMaaPKafkaConsumerFactory.class);
-       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
-       /**
-        * constructor initialization
-        * 
-        * @param settings
-        * @param metrics
-        * @param curator
-        * @throws missingReqdSetting
-        * @throws KafkaConsumerCacheException
-        * @throws UnknownHostException
-        */
-       public DMaaPKafkaConsumerFactory(
-                       @Qualifier("propertyReader") rrNvReadable settings,
-                       @Qualifier("dMaaPMetricsSet") MetricsSet metrics,
-                       @Qualifier("curator") CuratorFramework curator)
-                       throws missingReqdSetting, KafkaConsumerCacheException,
-                       UnknownHostException {
-               /*final String apiNodeId = settings.getString(
-                               CambriaConstants.kSetting_ApiNodeIdentifier,
-                               InetAddress.getLocalHost().getCanonicalHostName()
-                                               + ":"
-                                               + settings.getInt(CambriaConstants.kSetting_Port,
-                                                               CambriaConstants.kDefault_Port));*/
-                String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
-               CambriaConstants.kSetting_ApiNodeIdentifier);
-               if (apiNodeId == null){
-                       
-                       apiNodeId=InetAddress.getLocalHost().getCanonicalHostName()
-                       + ":"
-                       + settings.getInt(CambriaConstants.kSetting_Port,
-                                       CambriaConstants.kDefault_Port);
-               }
-               
-               log.info("This Cambria API Node identifies itself as [" + apiNodeId
-                               + "].");
-               final String mode = CambriaConstants.DMAAP;
-               /*fSettings = settings;
-               fZooKeeper = fSettings.getString(kSettings_KafkaZookeeper, settings
-                               .getString(CambriaConstants.kSetting_ZkConfigDbServers,
-                                               CambriaConstants.kDefault_ZkConfigDbServers));*/
-
-               String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
-               if(null==strkSettings_KafkaZookeeper){
-                        strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
-                       if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
-                       
-               }
-               fZooKeeper=  strkSettings_KafkaZookeeper;
-               
-               //final boolean isCacheEnabled = fSettings.getBoolean(
-                       //      kSetting_EnableCache, kDefault_IsCacheEnabled);
-               boolean kSetting_EnableCache= kDefault_IsCacheEnabled;
-               String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_EnableCache+"");
-               if(null!=strkSetting_EnableCache)
-                       {
-                               kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
-                       }
-                       
-                               final boolean isCacheEnabled = kSetting_EnableCache;
-                               
-                               
-               fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId, 
-                               metrics) : null;
-               if (fCache != null) {
-                       fCache.startCache(mode, curator);
-               }
-       }
-
-       @Override
-       public Consumer getConsumerFor(String topic, String consumerGroupName,
-                       String consumerId, int timeoutMs) throws UnavailableException {
-               KafkaConsumer kc;
-
-               try {
-                       kc = (fCache != null) ? fCache.getConsumerFor(topic,
-                                       consumerGroupName, consumerId) : null;
-               } catch (KafkaConsumerCacheException e) {
-                       throw new UnavailableException(e);
-               }
-
-               if (kc == null) {
-                       
-                       final InterProcessMutex ipLock = new InterProcessMutex( ConfigurationReader.getCurator(), "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
-//                     final InterProcessMutex fLock = new InterProcessMutex(
-//                                     ConfigurationReader.getCurator(), "/consumerFactory/"
-//                                                     + topic + "/" + consumerGroupName + "/"
-//                                                     + consumerId);
-                       boolean locked = false;
-                       try {
-                       
-                               locked = ipLock.acquire(30, TimeUnit.SECONDS);
-                               if (!locked) {
-                                       // FIXME: this seems to cause trouble in some cases. This exception
-                                       // gets thrown routinely. Possibly a consumer trying multiple servers
-                                       // at once, producing a never-ending cycle of overlapping locks?
-                                       // The problem is that it throws and winds up sending a 503 to the
-                                       // client, which would be incorrect if the client is causing trouble
-                                       // by switching back and forth.
-                                       
-                                       throw new UnavailableException("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + ", " + consumerGroupName + ", " + consumerId + ")");
-                               }
-                               
-//                             if (!fLock.acquire(30, TimeUnit.SECONDS)) {
-//                                     throw new UnavailableException(
-//                                                     "Could not acquire lock in order to create (topic, group, consumer) = "
-//                                                                     + "(" + topic + ", " + consumerGroupName
-//                                                                     + ", " + consumerId + ")");
-//                             }
-
-                               fCache.signalOwnership(topic, consumerGroupName, consumerId);
-
-                               log.info("Creating Kafka consumer for group ["
-                                               + consumerGroupName + "], consumer [" + consumerId
-                                               + "], on topic [" + topic + "].");
-
-                               final String fakeGroupName = consumerGroupName + "--" + topic;
-
-                               final ConsumerConfig ccc = createConsumerConfig(fakeGroupName,
-                                               consumerId);
-                               final ConsumerConnector cc = kafka.consumer.Consumer
-                                               .createJavaConsumerConnector(ccc);
-                               kc = new KafkaConsumer(topic, consumerGroupName, consumerId, cc);
-
-                               if (fCache != null) {
-                                       fCache.putConsumerFor(topic, consumerGroupName, consumerId,
-                                                       kc);
-                               }
-                       } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
-                               log.error("Exception find at getConsumerFor(String topic, String consumerGroupName,\r\n" + 
-                                               "                       String consumerId, int timeoutMs) : " + x);
-                               throw new UnavailableException("Couldn't connect to ZK.");
-                       } catch (KafkaConsumerCacheException e) {
-                               log.error("Failed to cache consumer (this may have performance implications): "
-                                               + e.getMessage());
-                       } catch (Exception e) {
-                               throw new UnavailableException(
-                                               "Error while acquiring consumer factory lock", e);
-                       } finally {
-                               if ( locked )
-                               {
-                                       try {
-                                               ipLock.release();
-                                       } catch (Exception e) {
-                                               throw new UnavailableException("Error while releasing consumer factory lock", e);
-                                       }
-                               }       
-                       }
-               }
-
-               return kc;
-       }
-
-       @Override
-       public synchronized void destroyConsumer(String topic,
-                       String consumerGroup, String clientId) {
-               if (fCache != null) {
-                       fCache.dropConsumer(topic, consumerGroup, clientId);
-               }
-       }
-
-       @Override
-       public synchronized Collection<? extends Consumer> getConsumers() {
-               return fCache.getConsumers();
-       }
-
-       @Override
-       public synchronized void dropCache() {
-               fCache.dropAllConsumers();
-       }
-
-       private ConsumerConfig createConsumerConfig(String groupId,
-                       String consumerId) {
-               final Properties props = new Properties();
-               props.put("zookeeper.connect", fZooKeeper);
-               props.put("group.id", groupId);
-               props.put("consumer.id", consumerId);
-               //props.put("auto.commit.enable", "false");
-               // additional settings: start with our defaults, then pull in configured
-               // overrides
-               props.putAll(KafkaInternalDefaults);
-               for (String key : KafkaConsumerKeys) {
-                       transferSettingIfProvided(props, key, "kafka");
-               }
-
-               return new ConsumerConfig(props);
-       }
-
-       //private final rrNvReadable fSettings;
-       private final KafkaConsumerCache fCache;
-
-       private String fZooKeeper;
-
-       private static final String kSettings_KafkaZookeeper = "kafka.client.zookeeper";
-
-       private static final HashMap<String, String> KafkaInternalDefaults = new HashMap<String, String>();
-
-       /**
-        * putting values in hashmap like consumer timeout, zookeeper time out, etc
-        * 
-        * @param setting
-        */
-       public static void populateKafkaInternalDefaultsMap() {
-                       //@Qualifier("propertyReader") rrNvReadable setting) {
-               try {
-                       
-                       HashMap<String, String> map1= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
-                                       
-                       KafkaInternalDefaults.put("consumer.timeout.ms",
-                                                       //      AJSCPropertiesMap.get(CambriaConstants.msgRtr_prop, "consumer.timeout.ms"));
-                       map1.get( "consumer.timeout.ms"));
-                       
-                       KafkaInternalDefaults.put("zookeeper.connection.timeout.ms",
-                                       //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.connection.timeout.ms"));
-                                       map1.get("zookeeper.connection.timeout.ms"));
-                       KafkaInternalDefaults.put("zookeeper.session.timeout.ms",
-                                       //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.session.timeout.ms"));
-                       map1.get("zookeeper.session.timeout.ms"));
-                       KafkaInternalDefaults.put("zookeeper.sync.time.ms",
-                               //      AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.sync.time.ms"));
-                       map1.get( "zookeeper.sync.time.ms"));
-                       KafkaInternalDefaults.put("auto.commit.interval.ms",
-                                       //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.interval.ms"));
-                       map1.get( "auto.commit.interval.ms"));
-                       KafkaInternalDefaults.put("fetch.message.max.bytes",
-                                       //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "fetch.message.max.bytes"));
-                       map1.get("fetch.message.max.bytes"));
-                       KafkaInternalDefaults.put("auto.commit.enable",
-                       //              AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.enable"));
-                       map1.get("auto.commit.enable"));
-               } catch (Exception e) {
-                       log.error("Failed to load Kafka Internal Properties.", e);
-               }
-       }
-
-       private static final String KafkaConsumerKeys[] = { "socket.timeout.ms",
-                       "socket.receive.buffer.bytes", "fetch.message.max.bytes",
-                       "auto.commit.interval.ms", "queued.max.message.chunks",
-                       "rebalance.max.retries", "fetch.min.bytes", "fetch.wait.max.bytes",
-                       "rebalance.backoff.ms", "refresh.leader.backoff.ms",
-                       "auto.offset.reset", "consumer.timeout.ms",
-                       "zookeeper.session.timeout.ms", "zookeeper.connection.timeout.ms",
-                       "zookeeper.sync.time.ms" };
-
-       private static String makeLongKey(String key, String prefix) {
-               return prefix + "." + key;
-       }
-
-       private void transferSettingIfProvided(Properties target, String key,
-                       String prefix) {
-               String keyVal= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,makeLongKey(key, prefix));
-       
-       //      if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
-               if (null!=keyVal) {
-       //              final String val = fSettings
-               //                      .getString(makeLongKey(key, prefix), "");
-                       log.info("Setting [" + key + "] to " + keyVal + ".");
-                       target.put(key, keyVal);
-               }
-       }
-
-       }
-
-
diff --git a/src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java b/src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java
deleted file mode 100644 (file)
index 2aedb95..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package com.att.nsa.cambria.beans;
-
-import org.I0Itec.zkclient.ZkClient;
-
-import com.att.nsa.cambria.utils.ConfigurationReader;
-
-import kafka.utils.ZKStringSerializer$;
-
-public class ZkClientFactory {
-       
-       public static ZkClient createZkClient(){
-               return  new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
-                               ZKStringSerializer$.MODULE$);
-
-       }
-
-}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java
deleted file mode 100644 (file)
index bb2effd..0000000
+++ /dev/null
@@ -1,795 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.http.HttpStatus;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.resources.CambriaEventSet;
-import com.att.nsa.cambria.resources.CambriaOutboundEventStream;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.EventsService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
-import com.att.nsa.cambria.utils.Utils;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.util.rrConvertor;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * This class provides the functinality to publish and subscribe message to
- * kafka
- * 
- * @author author
- *
- */
-@Service
-public class EventsServiceImpl implements EventsService {
-       //private static final Logger LOG = Logger.getLogger(EventsServiceImpl.class);
-       private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
-
-       private static final String BATCH_LENGTH = "event.batch.length";
-       private static final String TRANSFER_ENCODING = "Transfer-Encoding";
-       @Autowired
-       private DMaaPErrorMessages errorMessages;
-
-       //@Value("${metrics.send.cambria.topic}")
-       //private String metricsTopic;
-       
-       public void setErrorMessages(DMaaPErrorMessages errorMessages) {
-               this.errorMessages = errorMessages;
-       }
-
-       /**
-        * @param ctx
-        * @param topic
-        * @param consumerGroup
-        * @param clientId
-        * @throws ConfigDbException,
-        *             TopicExistsException, AccessDeniedException,
-        *             UnavailableException, CambriaApiException, IOException
-        * 
-        * 
-        */
-       @Override
-       public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
-                       throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
-                       CambriaApiException, IOException,DMaaPAccessDeniedException {
-               final long startTime = System.currentTimeMillis();
-               final HttpServletRequest req = ctx.getRequest();
-
-               if(clientId == null)
-                       throw new NullPointerException();
-               
-               boolean isAAFTopic=false;
-               // was this host blacklisted?
-               final String remoteAddr = Utils.getRemoteAddress(ctx);;
-               if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
-               {
-                       
-                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
-                               "] is blacklisted. Please contact the cluster management team."
-                                       ,null,Utils.getFormattedDate(new Date()),topic,
-                                       Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-                                       null,null);
-                       LOG.info(errRes.toString());
-                       throw new CambriaApiException(errRes);
-               }
-               
-               
-               int limit = CambriaConstants.kNoLimit;
-               if (req.getParameter("limit") != null) {
-                       limit = Integer.parseInt(req.getParameter("limit"));
-               }
-
-               int timeoutMs= CambriaConstants.kNoTimeout;
-               String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"timeout");
-               if(strtimeoutMS!=null)timeoutMs=Integer.parseInt(strtimeoutMS);
-               //int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", CambriaConstants.kNoTimeout);
-               if (req.getParameter("timeout") != null) {
-                       timeoutMs = Integer.parseInt(req.getParameter("timeout"));
-               }
-
-               // By default no filter is applied if filter is not passed as a
-               // parameter in the request URI
-               String topicFilter = CambriaConstants.kNoFilter;
-               if (null != req.getParameter("filter")) {
-                       topicFilter = req.getParameter("filter");
-               }
-               // pretty to print the messaages in new line
-               String prettyval="0";
-               String strPretty=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"pretty");
-               if (null!=strPretty)prettyval=strPretty;
-               
-               String metaval="0";
-               String strmeta=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"meta");
-               if (null!=strmeta)metaval=strmeta;
-               
-               final boolean pretty = rrConvertor
-                               .convertToBooleanBroad(prettyval);
-               // withMeta to print offset along with message
-               final boolean withMeta = rrConvertor
-                               .convertToBooleanBroad(metaval);
-               
-               
-               /*final boolean pretty = rrConvertor
-                               .convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("pretty", "0"));
-               // withMeta to print offset along with message
-               final boolean withMeta = rrConvertor
-                               .convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("meta", "0"));
-*/
-               final LogWrap logger = new LogWrap ( topic, consumerGroup, clientId);
-               logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter);
-
-               // is this user allowed to read this topic?
-               final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
-               final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-               
-               if (metatopic == null) {
-                       // no such topic.
-                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, 
-                                       DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), 
-                                       errorMessages.getTopicNotExist()+"-[" + topic + "]",null,Utils.getFormattedDate(new Date()),topic,null,null,
-                                       clientId,ctx.getRequest().getRemoteHost());
-                       LOG.info(errRes.toString());
-                       throw new CambriaApiException(errRes);
-               }
-               String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-               if (null==metricTopicname)
-                metricTopicname="msgrtr.apinode.metrics.dmaap";
-               
-                if(null==ctx.getRequest().getHeader("Authorization")&& !topic.equalsIgnoreCase(metricTopicname))
-               {       
-                       if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))){
-                       // check permissions
-                       metatopic.checkUserRead(user);  
-                       }
-               }
-               // if headers are not provided then user will be null
-                if(user == null && null!=ctx.getRequest().getHeader("Authorization"))
-               {
-                       // the topic name will be sent by the client
-//                     String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"sub";
-                       DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-                       String permission = aaf.aafPermissionString(topic, "sub");
-                       if(!aaf.aafAuthentication(ctx.getRequest(), permission))
-                       {
-                               ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-                                               DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-                                               errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,null,null,
-                                               clientId,ctx.getRequest().getRemoteHost());
-                               LOG.info(errRes.toString());
-                               throw new DMaaPAccessDeniedException(errRes);
-                               
-                       }
-                       isAAFTopic = true;
-               }
-               Consumer c = null;
-               try {
-                       final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-                       final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
-                       rl.onCall(topic, consumerGroup, clientId);
-
-                       c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
-
-               /*      final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c,
-                                       ctx.getConfigReader().getSettings()).timeout(timeoutMs).limit(limit).filter(topicFilter)
-                                                       .pretty(pretty).withMeta(withMeta)
-                                                       // .atOffset(topicOffset)
-                                                       .build();*/
-                       final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs).limit(limit).filter(topicFilter)
-                                       .pretty(pretty).withMeta(withMeta).build();
-                       coes.setDmaapContext(ctx);
-                       coes.setTopic(metatopic);
-                       if( isTransEnabled() || isAAFTopic ){
-                               coes.setTransEnabled(true);
-                       }else{
-                       coes.setTransEnabled(false);
-                       }
-                       coes.setTopicStyle(isAAFTopic);
-            
-                       DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
-                       DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
-
-                       // No IOException thrown during respondOkWithStream, so commit the
-                       // new offsets to all the brokers
-                       c.commitOffsets();
-                       final int sent = coes.getSentCount();
-
-                       metricsSet.consumeTick(sent);
-                       rl.onSend(topic, consumerGroup, clientId, sent);
-
-                       final long elapsedMs = System.currentTimeMillis() - startTime;
-                       logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset());
-
-               } catch (UnavailableException excp) {
-                       logger.warn(excp.getMessage(), excp);
-                       
-                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, 
-                                       DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), 
-                                       errorMessages.getServerUnav()+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
-                                       clientId,ctx.getRequest().getRemoteHost());
-                       LOG.info(errRes.toString());
-                       throw new CambriaApiException(errRes);
-                       
-               } catch (CambriaApiException excp) {
-                       logger.warn(excp.getMessage(), excp);
-                       throw excp;
-               } catch (Exception excp) {
-                       logger.warn("Couldn't respond to client, closing cambria consumer", excp);
-                       ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
-                       
-                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, 
-                                       DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), 
-                                       "Couldn't respond to client, closing cambria consumer"+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
-                                       clientId,ctx.getRequest().getRemoteHost());
-                       LOG.info(errRes.toString());
-                       throw new CambriaApiException(errRes);
-               } finally {
-                       // If no cache, close the consumer now that we're done with it.
-                       boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
-                       String strkSetting_EnableCache=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,ConsumerFactory.kSetting_EnableCache);
-                       if(null!=strkSetting_EnableCache) kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
-                       //if (!ctx.getConfigReader().getSettings().getBoolean(ConsumerFactory.kSetting_EnableCache,     ConsumerFactory.kDefault_IsCacheEnabled) && (c != null)) {
-                       if (!kSetting_EnableCache && (c != null)) {
-                               c.close();
-
-                       }
-               }
-       }
-
-       /**
-        * @throws missingReqdSetting 
-        * 
-        */
-       @Override
-       public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
-                       final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-                       CambriaApiException, IOException, missingReqdSetting,DMaaPAccessDeniedException {
-
-               // is this user allowed to write to this topic?
-               final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
-               final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-               boolean isAAFTopic=false;
-               
-                       // was this host blacklisted?
-                               final String remoteAddr = Utils.getRemoteAddress(ctx);
-                               
-                               if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
-                               {
-                                       
-                                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-                                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
-                                               "] is blacklisted. Please contact the cluster management team."
-                                                       ,null,Utils.getFormattedDate(new Date()),topic,
-                                                       Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-                                                       null,null);
-                                       LOG.info(errRes.toString());
-                                       throw new CambriaApiException(errRes);
-                               }
-                               
-                                 String topicNameStd = null;
-                      
-                      //       topicNameStd= ctx.getConfigReader().getSettings().getString("enforced.topic.name.AAF");
-                       topicNameStd= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
-                       String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-                        if (null==metricTopicname)
-                                metricTopicname="msgrtr.apinode.metrics.dmaap";
-                       boolean topicNameEnforced=false;
-                       if (null != topicNameStd && topic.startsWith(topicNameStd)  )
-                       {
-                               topicNameEnforced = true;
-                       }
-               
-                      //Here check if the user has rights to publish on the topic
-                      //( This will be called when no auth is added or when UEB API Key Authentication is used)
-                      //checkUserWrite(user) method will throw an error when there is no Auth header added or when the
-                      //user has no publish rights
-                       
-                               if(null != metatopic &&  null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) && null==ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) 
-                               {
-                                       metatopic.checkUserWrite(user);
-                               }
-
-       
-                               
-                               // if headers are not provided then user will be null
-                if(topicNameEnforced || (user == null && null!=ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)))
-               {
-                       // the topic name will be sent by the client
-                                               // String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
-                                               DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-                                               String permission = aaf.aafPermissionString(topic, "pub");
-                                               if(!aaf.aafAuthentication(ctx.getRequest(), permission))
-                                               {
-                                                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-                                                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-                                                                       errorMessages.getNotPermitted1()+" publish "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,
-                                                                       Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-                                                                       null,null);
-                                                       LOG.info(errRes.toString());
-                                                       throw new DMaaPAccessDeniedException(errRes);
-                                               }
-                                               isAAFTopic=true;
-               }       
-                
-               final HttpServletRequest req = ctx.getRequest();
-
-               // check for chunked input
-               boolean chunked = false;
-               if (null != req.getHeader(TRANSFER_ENCODING)) {
-                       chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
-               }
-               // get the media type, or set it to a generic value if it wasn't
-               // provided
-               String mediaType = req.getContentType();
-               if (mediaType == null || mediaType.length() == 0) {
-                       mediaType = MimeTypes.kAppGenericBinary;
-               }
-
-               if (mediaType.contains("charset=UTF-8")) {
-                       mediaType = mediaType.replace("; charset=UTF-8", "").trim();
-               }
-               
-               String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
-               boolean istransidreqd=false;
-               if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")){
-                       istransidreqd = true; 
-               }
-               
-               if (isAAFTopic || istransidreqd ) {
-                       pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
-               }
-               else
-               {
-                       pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
-               }
-                       
-
-       }
-
-       /**
-        * 
-        * @param ctx
-        * @param topic
-        * @param msg
-        * @param defaultPartition
-        * @param chunked
-        * @param mediaType
-        * @throws ConfigDbException
-        * @throws AccessDeniedException
-        * @throws TopicExistsException
-        * @throws CambriaApiException
-        * @throws IOException
-        */
-       private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
-                       boolean chunked, String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-                                       CambriaApiException, IOException {
-               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-               // setup the event set
-               final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
-
-               // start processing, building a batch to push to the backend
-               final long startMs = System.currentTimeMillis();
-               long count = 0;
-               
-               long maxEventBatch=(long)1024 * 16;
-               String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
-               if(null!=batchlen)maxEventBatch=Long.parseLong(batchlen);
-               
-               // long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
-               final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-               final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
-               try {
-                       // for each message...
-                       Publisher.message m = null;
-                       while ((m = events.next()) != null) {
-                               // add the message to the batch
-                               batch.add(m);
-                               final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-                                               m.getMessage());
-                               kms.add(data);
-                               // check if the batch is full
-                               final int sizeNow = batch.size();
-                               if (sizeNow > maxEventBatch) {
-                                       ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-                                       kms.clear();
-                                       batch.clear();
-                                       metricsSet.publishTick(sizeNow);
-                                       count += sizeNow;
-                               }
-                       }
-
-                       // send the pending batch
-                       final int sizeNow = batch.size();
-                       if (sizeNow > 0) {
-                               ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-                               kms.clear();
-                               batch.clear();
-                               metricsSet.publishTick(sizeNow);
-                               count += sizeNow;
-                       }
-
-                       final long endMs = System.currentTimeMillis();
-                       final long totalMs = endMs - startMs;
-
-                       LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-                       // build a responseP
-                       final JSONObject response = new JSONObject();
-                       response.put("count", count);
-                       response.put("serverTimeMs", totalMs);
-                       DMaaPResponseBuilder.respondOk(ctx, response);
-
-               } catch (Exception excp) {
-                       int status = HttpStatus.SC_NOT_FOUND;
-                       String errorMsg=null;
-                       if(excp instanceof CambriaApiException) {
-                                status = ((CambriaApiException) excp).getStatus();
-                                JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-                                JSONObject errObject = new JSONObject(jsonTokener);
-                                errorMsg = (String) errObject.get("message");
-                                       
-                       }
-                       ErrorResponse errRes = new ErrorResponse(status, 
-                                       DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-                                       errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
-                                       null,ctx.getRequest().getRemoteHost(),
-                                       null,null);
-                       LOG.info(errRes.toString());
-                       throw new CambriaApiException(errRes);
-                       
-                       
-               }
-       }
-
-       /**
-        * 
-        * @param ctx
-        * @param inputStream
-        * @param topic
-        * @param partitionKey
-        * @param requestTime
-        * @param chunked
-        * @param mediaType
-        * @throws ConfigDbException
-        * @throws AccessDeniedException
-        * @throws TopicExistsException
-        * @throws IOException
-        * @throws CambriaApiException
-        */
-       private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
-                       final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
-                                       throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
-                                       CambriaApiException {
-
-               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-               // setup the event set
-               final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
-
-               // start processing, building a batch to push to the backend
-               final long startMs = System.currentTimeMillis();
-               long count = 0;
-               long maxEventBatch =  (long)1024 * 16;
-               String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
-                       if(null!=evenlen)maxEventBatch=Long.parseLong(evenlen);
-               //final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
-               final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-               final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
-               Publisher.message m = null;
-               int messageSequence = 1;
-               Long batchId = 1L;
-               final boolean transactionEnabled = true;
-               int publishBatchCount=0;
-               SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
-
-               //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
-               try {
-                       // for each message...
-                       batchId=DMaaPContext.getBatchID();
-                       
-                       String responseTransactionId = null;
-                       
-                       while ((m = events.next()) != null) {
-                       
-                               //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
-                               
-
-                               addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
-                                               transactionEnabled);
-                               messageSequence++;
-
-                               // add the message to the batch
-                               batch.add(m);
-                               
-                               responseTransactionId = m.getLogDetails().getTransactionId();
-                               
-                               JSONObject jsonObject = new JSONObject();
-                               jsonObject.put("message", m.getMessage());
-                               jsonObject.put("transactionId", responseTransactionId);
-                               final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-                                               jsonObject.toString());
-                               kms.add(data);
-
-                               // check if the batch is full
-                               final int sizeNow = batch.size();
-                               if (sizeNow >= maxEventBatch) {
-                                       String startTime = sdf.format(new Date());
-                                       LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
-                                       try {
-                                               ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-                                               //transactionLogs(batch);
-                                               for (message msg : batch) {
-                                                       LogDetails logDetails = msg.getLogDetails();
-                                                       LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
-                                               }
-                                       } catch (Exception excp) {
-                                               
-                                               int status = HttpStatus.SC_NOT_FOUND;
-                                               String errorMsg=null;
-                                               if(excp instanceof CambriaApiException) {
-                                                        status = ((CambriaApiException) excp).getStatus();
-                                                        JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-                                                        JSONObject errObject = new JSONObject(jsonTokener);
-                                                        errorMsg = (String) errObject.get("message");
-                                               }
-                                               ErrorResponse errRes = new ErrorResponse(status, 
-                                                               DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-                                                               "Transaction-"+errorMessages.getPublishMsgError()+":"+topic+ "."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,
-                                                               null,Utils.getFormattedDate(new Date()),topic,
-                                                               Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-                                                               null,null);
-                                               LOG.info(errRes.toString());
-                                               throw new CambriaApiException(errRes);
-                                       }
-                                       kms.clear();
-                                       batch.clear();
-                                       metricsSet.publishTick(sizeNow);
-                                       publishBatchCount=sizeNow;
-                                       count += sizeNow;
-                                       //batchId++;
-                                       String endTime = sdf.format(new Date());
-                                       LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
-                                                       + ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
-                                       batchId=DMaaPContext.getBatchID();
-                               }
-                       }
-
-                       // send the pending batch
-                       final int sizeNow = batch.size();
-                       if (sizeNow > 0) {
-                               String startTime = sdf.format(new Date());
-                               LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
-                               try {
-                                       ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-                                       //transactionLogs(batch);
-                                       for (message msg : batch) {
-                                               LogDetails logDetails = msg.getLogDetails();
-                                               LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
-                                       }
-                               } catch (Exception excp) {
-                                       int status = HttpStatus.SC_NOT_FOUND;
-                                       String errorMsg=null;
-                                       if(excp instanceof CambriaApiException) {
-                                                status = ((CambriaApiException) excp).getStatus();
-                                                JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-                                                JSONObject errObject = new JSONObject(jsonTokener);
-                                                errorMsg = (String) errObject.get("message");
-                                       }
-                                       
-                                       ErrorResponse errRes = new ErrorResponse(status, 
-                                                       DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-                                                       "Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+ errorMessages.getPublishMsgCount()+count+"."+errorMsg,
-                                                       null,Utils.getFormattedDate(new Date()),topic,
-                                                       Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-                                                       null,null);
-                                       LOG.info(errRes.toString());
-                                       throw new CambriaApiException(errRes);
-                               }
-                               kms.clear();
-                               metricsSet.publishTick(sizeNow);
-                               count += sizeNow;
-                               //batchId++;
-                               String endTime = sdf.format(new Date());
-                               publishBatchCount=sizeNow;
-                               LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
-                                               + ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
-                       }
-
-                       final long endMs = System.currentTimeMillis();
-                       final long totalMs = endMs - startMs;
-
-                       LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-                       if (null != responseTransactionId) {
-                               ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
-                       }
-                       
-                       // build a response
-                       final JSONObject response = new JSONObject();
-                       response.put("count", count);
-                       response.put("serverTimeMs", totalMs);
-                       DMaaPResponseBuilder.respondOk(ctx, response);
-                       
-               } catch (Exception excp) {
-                       int status = HttpStatus.SC_NOT_FOUND;
-                       String errorMsg=null;
-                       if(excp instanceof CambriaApiException) {
-                                status = ((CambriaApiException) excp).getStatus();
-                                JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-                                JSONObject errObject = new JSONObject(jsonTokener);
-                                errorMsg = (String) errObject.get("message");
-                       }
-                       
-                       ErrorResponse errRes = new ErrorResponse(
-                                       status, 
-                                       DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-                                       "Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
-                                       Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-                                       null,null);
-                       LOG.info(errRes.toString());
-                       throw new CambriaApiException(errRes);
-               }
-       }
-
-       /**
-        * 
-        * @param msg
-        * @param topic
-        * @param request
-        * @param messageCreationTime
-        * @param messageSequence
-        * @param batchId
-        * @param transactionEnabled
-        */
-       private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
-                       final String messageCreationTime, final int messageSequence, final Long batchId,
-                       final boolean transactionEnabled) {
-               LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
-                               transactionEnabled);
-               logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
-               msg.setTransactionEnabled(transactionEnabled);
-               msg.setLogDetails(logDetails);
-       }
-
-
-
-       /**
-        * 
-        * @author author
-        *
-        */
-       private static class LogWrap {
-               private final String fId;
-
-               /**
-                * constructor initialization
-                * 
-                * @param topic
-                * @param cgroup
-                * @param cid
-                */
-               public LogWrap(String topic, String cgroup, String cid) {
-                       fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
-               }
-
-               /**
-                * 
-                * @param msg
-                */
-               public void info(String msg) {
-                       LOG.info(fId + msg);
-               }
-
-               /**
-                * 
-                * @param msg
-                * @param t
-                */
-               public void warn(String msg, Exception t) {
-                       LOG.warn(fId + msg, t);
-               }
-
-       }
-       
-       private boolean isTransEnabled() {
-               String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
-               boolean istransidreqd=false;
-               if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) ){
-                       istransidreqd = true; 
-               }
-               
-               return istransidreqd;
-
-       }
-
-       private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
-                       final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
-               LogDetails logDetails = new LogDetails();
-               logDetails.setTopicId(topicName);
-               logDetails.setMessageTimestamp(messageTimestamp);
-               logDetails.setPublisherId(Utils.getUserApiKey(request));
-               logDetails.setPublisherIp(request.getRemoteHost());
-               logDetails.setMessageBatchId(batchId);
-               logDetails.setMessageSequence(String.valueOf(messageSequence));
-               logDetails.setTransactionEnabled(transactionEnabled);
-               logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
-               logDetails.setServerIp(request.getLocalAddr());
-               return logDetails;
-       }
-
-       /*public String getMetricsTopic() {
-               return metricsTopic;
-       }
-
-       public void setMetricsTopic(String metricsTopic) {
-               this.metricsTopic = metricsTopic;
-       }*/
-
-}
\ No newline at end of file
index f3f4a5c..9967036 100644 (file)
@@ -30,6 +30,7 @@ import org.junit.Before;
 import org.junit.Test;\r
 \r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
+import com.att.mr.apiServer.metrics.cambria.DMaaPMetricsSender;\r
 \r
 public class DMaaPMetricsSenderTest {\r
 \r
index 2de5f08..f41f33f 100644 (file)
@@ -21,7 +21,9 @@
 package com.att.nsa.cambria;\r
 \r
 import static org.junit.Assert.*;\r
-import com.att.nsa.cambria.exception.ErrorResponse;\r
+\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.exception.ErrorResponse;\r
 \r
 import java.io.IOException;\r
 \r
index aff9227..b71bf6e 100644 (file)
@@ -21,7 +21,9 @@
 package com.att.nsa.cambria;\r
 \r
 import static org.junit.Assert.*;\r
-import com.att.nsa.cambria.exception.ErrorResponse;\r
+\r
+import com.att.dmf.mr.CambriaApiVersionInfo;\r
+import com.att.dmf.mr.exception.ErrorResponse;\r
 \r
 import java.io.IOException;\r
 \r
index 939c0d3..54ff469 100644 (file)
@@ -28,7 +28,7 @@ import org.junit.runners.Suite.SuiteClasses;
 import org.apache.log4j.Logger;\r
 \r
 @RunWith(Suite.class)\r
-@SuiteClasses({ KafkaConsumerTest.class, KafkaConsumerCacheTest.class, KafkaPublisherTest.class, })\r
+@SuiteClasses({ KafkaConsumerCacheTest.class, KafkaPublisherTest.class, })\r
 public class JUnitTestSuite {\r
        private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class);\r
 \r
index 2489110..06d7b58 100644 (file)
@@ -22,24 +22,58 @@ package com.att.nsa.cambria.backends.kafka;
 \r
 import static org.junit.Assert.*;\r
 \r
+import java.util.concurrent.ConcurrentHashMap;\r
+\r
 import org.apache.curator.framework.CuratorFramework;\r
 import org.junit.After;\r
 import org.junit.Before;\r
 import org.junit.Test;\r
-\r
-import com.att.nsa.cambria.backends.MetricsSet;\r
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;\r
-\r
+import org.junit.runner.RunWith;\r
+import org.mockito.Mock;\r
+import org.mockito.MockitoAnnotations;\r
+import org.powermock.api.mockito.PowerMockito;\r
+import org.powermock.modules.junit4.PowerMockRunner;\r
+import org.powermock.core.classloader.annotations.PrepareForTest;\r
+\r
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
+\r
+import com.att.dmf.mr.backends.MetricsSet;\r
+import com.att.dmf.mr.backends.kafka.Kafka011Consumer;\r
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache;\r
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+\r
+@RunWith(PowerMockRunner.class)\r
+@PrepareForTest({ AJSCPropertiesMap.class })\r
 public class KafkaConsumerCacheTest {\r
+       private KafkaConsumerCache kafkaConsumerCache =null;\r
+       @Mock\r
+       private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;\r
+       @Mock\r
+       private MetricsSet fMetrics;\r
 \r
        @Before\r
        public void setUp() throws Exception {\r
+               MockitoAnnotations.initMocks(this);\r
+               \r
        }\r
 \r
        @After\r
        public void tearDown() throws Exception {\r
        }\r
 \r
+\r
+       @Test\r
+       public void testSweep() {\r
+               kafkaConsumerCache = new KafkaConsumerCache();\r
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);\r
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kSetting_TouchEveryMs")).thenReturn("100");\r
+               kafkaConsumerCache.sweep();\r
+\r
+       }\r
+       \r
+\r
        // DOES NOT WORK\r
        @Test\r
        public void testStartCache() {\r
@@ -55,11 +89,13 @@ public class KafkaConsumerCacheTest {
                 * e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }\r
                 */\r
 \r
-               KafkaConsumerCache kafka = null;\r
+               \r
                new CuratorFrameworkImpl();\r
                new MetricsSetImpl();\r
+               KafkaConsumerCache kafka=null;\r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
+                       kafka.setfApiId("1");\r
                        kafka.startCache("DMAAP", null);\r
                } catch (NoClassDefFoundError e) {\r
 \r
@@ -84,20 +120,17 @@ public class KafkaConsumerCacheTest {
 \r
        }\r
 \r
-       /*@Test\r
-       public void testStopCache() {\r
-\r
-               KafkaConsumerCache kafka = null;\r
-               new CuratorFrameworkImpl();\r
-               new MetricsSetImpl();\r
-               try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
-                       kafka.stopCache();\r
-               } catch (NoClassDefFoundError e) {\r
-\r
-               }\r
-\r
-       }*/\r
+       /*\r
+        * @Test public void testStopCache() {\r
+        * \r
+        * KafkaConsumerCache kafka = null; new CuratorFrameworkImpl(); new\r
+        * MetricsSetImpl(); try { kafka = new KafkaConsumerCache("123", null);\r
+        * kafka.stopCache(); } catch (NoClassDefFoundError e) {\r
+        * \r
+        * }\r
+        * \r
+        * }\r
+        */\r
 \r
        @Test\r
        public void testGetConsumerFor() {\r
@@ -105,7 +138,7 @@ public class KafkaConsumerCacheTest {
                KafkaConsumerCache kafka = null;\r
 \r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
                        kafka.getConsumerFor("testTopic", "CG1", "23");\r
                } catch (NoClassDefFoundError e) {\r
 \r
@@ -119,11 +152,11 @@ public class KafkaConsumerCacheTest {
        @Test\r
        public void testPutConsumerFor() {\r
 \r
-               KafkaConsumer consumer = null;\r
+               Kafka011Consumer consumer = null;\r
                KafkaConsumerCache kafka = null;\r
 \r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
 \r
                } catch (NoClassDefFoundError e) {\r
                        try {\r
@@ -145,7 +178,7 @@ public class KafkaConsumerCacheTest {
                KafkaConsumerCache kafka = null;\r
 \r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
 \r
                } catch (NoClassDefFoundError e) {\r
                        try {\r
@@ -163,7 +196,7 @@ public class KafkaConsumerCacheTest {
 \r
                KafkaConsumerCache kafka = null;\r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
 \r
                } catch (NoClassDefFoundError e) {\r
                        try {\r
@@ -182,7 +215,7 @@ public class KafkaConsumerCacheTest {
                KafkaConsumerCache kafka = null;\r
 \r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
                        // kafka.signalOwnership("testTopic", "CG1", "23");\r
                } catch (NoClassDefFoundError e) {\r
                        try {\r
@@ -192,13 +225,13 @@ public class KafkaConsumerCacheTest {
                                e.printStackTrace();\r
                        } catch (NullPointerException e1) {\r
                                // TODO Auto-generated catch block\r
-                               //assertTrue(true);\r
+                               // assertTrue(true);\r
                                e1.printStackTrace();\r
                        }\r
 \r
                }\r
-               \r
-               //assertTrue(true);\r
+\r
+               // assertTrue(true);\r
        }\r
 \r
        @Test\r
@@ -207,7 +240,7 @@ public class KafkaConsumerCacheTest {
                KafkaConsumerCache kafka = null;\r
 \r
                try {\r
-                       kafka = new KafkaConsumerCache("123", null);\r
+                       kafka = new KafkaConsumerCache();\r
                        // kafka.dropConsumer("testTopic", "CG1", "23");\r
                } catch (NoClassDefFoundError e) {\r
                        try {\r
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerTest.java b/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerTest.java
deleted file mode 100644 (file)
index 37e3604..0000000
+++ /dev/null
@@ -1,334 +0,0 @@
-/*-\r
- * ============LICENSE_START=======================================================\r
- * ONAP Policy Engine\r
- * ================================================================================\r
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- *      http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END=========================================================\r
- */\r
-\r
-package com.att.nsa.cambria.backends.kafka;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.io.IOException;\r
-\r
-import org.junit.After;\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-\r
-import kafka.javaapi.consumer.ConsumerConnector;\r
-\r
-public class KafkaConsumerTest {\r
-\r
-       @Before\r
-       public void setUp() throws Exception {\r
-       }\r
-\r
-       @After\r
-       public void tearDown() throws Exception {\r
-       }\r
-\r
-       // DOES NOT WORK\r
-       @Test\r
-       public void testGetName() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.getName();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testGetCreateTimeMs() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.getCreateTimeMs();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testGetLastAccessMs() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.getLastAccessMs();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testNextMessage() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.nextMessage();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testGetOffset() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-               } catch (NullPointerException e) {\r
-                       try {\r
-\r
-                               kafka.getOffset();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testCommitOffsets() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.commitOffsets();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testTouch() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.touch();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testGetLastTouch() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.getLastTouch();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testClose() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.close();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testGetConsumerGroup() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.getConsumerGroup();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-       @Test\r
-       public void testGetConsumerId() {\r
-\r
-               String topic = "testTopic";\r
-               String group = "group1";\r
-               String id = "1";\r
-               ConsumerConnector cc = null;\r
-               \r
-               KafkaConsumer kafka = null;\r
-\r
-               try {\r
-\r
-                       kafka = new KafkaConsumer(topic, group, id, cc);\r
-\r
-                       \r
-\r
-               } catch (NullPointerException e) {\r
-                       try {\r
-                               kafka.getConsumerId();\r
-                       } catch (NullPointerException e1) {\r
-                               // TODO Auto-generated catch block\r
-                               assertTrue(true);\r
-                       }\r
-               }\r
-\r
-       }\r
-\r
-}\r
index 465c66a..3673845 100644 (file)
@@ -33,7 +33,7 @@ import org.junit.ClassRule;
 import org.junit.Test;\r
 \r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;\r
 \r
 import kafka.common.FailedToSendMessageException;\r
index a32ee98..b5f7b74 100644 (file)
@@ -25,7 +25,7 @@ import java.util.Map;
 \r
 import org.json.JSONObject;\r
 \r
-import com.att.nsa.cambria.backends.MetricsSet;\r
+import com.att.dmf.mr.backends.MetricsSet;\r
 import com.att.nsa.metrics.CdmMeasuredItem;\r
 \r
 public class MetricsSetImpl implements MetricsSet {\r
index ba5a7c6..fc11552 100644 (file)
@@ -29,7 +29,7 @@ import org.apache.log4j.Logger;
 \r
 @RunWith(Suite.class)\r
 @SuiteClasses({ MemoryConsumerFactoryTest.class, MemoryMetaBrokerTest.class, MemoryQueueTest.class, \r
-       MemoryQueuePublisherTest.class, MessageDropperTest.class, MessageLoggerTest.class, })\r
+       MemoryQueuePublisherTest.class,  MessageLoggerTest.class, })\r
 public class JUnitTestSuite {\r
        private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class);\r
 \r
index 7b470bc..d6fe6e6 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.backends.memory.MemoryConsumerFactory;\r
+\r
 public class MemoryConsumerFactoryTest {\r
 \r
        @Before\r
@@ -46,8 +48,9 @@ public class MemoryConsumerFactoryTest {
                String topic = "testTopic";\r
                String consumerGroupId = "CG1";\r
                String clientId = "C1";\r
+               String remoteHost="remoteHost";\r
                int timeoutMs = 1000; \r
-               factory.getConsumerFor(topic, consumerGroupId, clientId, timeoutMs);\r
+               factory.getConsumerFor(topic, consumerGroupId, clientId, timeoutMs,remoteHost);\r
                \r
                String trueValue = "True";\r
                assertTrue(trueValue.equalsIgnoreCase("True"));\r
index b15cb2f..2c41068 100644 (file)
@@ -28,7 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.backends.memory.MemoryMetaBroker;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
 \r
 public class MemoryMetaBrokerTest {\r
 \r
index 8ee4bd1..4522734 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.backends.memory.MemoryQueuePublisher;\r
+\r
 \r
 public class MemoryQueuePublisherTest {\r
 \r
@@ -44,7 +46,7 @@ public class MemoryQueuePublisherTest {
                MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null);\r
                \r
                try {\r
-                       publisher.sendBatchMessage("testTopic", null);\r
+                       publisher.sendBatchMessageNew("testTopic", null);\r
                } catch (IOException e) {\r
                        // TODO Auto-generated catch block\r
                        e.printStackTrace();\r
index fdb84d0..637add2 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.backends.memory.MemoryQueue;\r
+\r
 \r
 public class MemoryQueueTest {\r
 \r
diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/MessageDropperTest.java b/src/test/java/com/att/nsa/cambria/backends/memory/MessageDropperTest.java
deleted file mode 100644 (file)
index c27621e..0000000
+++ /dev/null
@@ -1,98 +0,0 @@
-/*-\r
- * ============LICENSE_START=======================================================\r
- * ONAP Policy Engine\r
- * ================================================================================\r
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- *      http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END=========================================================\r
- */\r
-\r
-package com.att.nsa.cambria.backends.memory;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-\r
-import org.junit.After;\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
-\r
-import kafka.producer.KeyedMessage;\r
-\r
-public class MessageDropperTest {\r
-\r
-       @Before\r
-       public void setUp() throws Exception {\r
-       }\r
-\r
-       @After\r
-       public void tearDown() throws Exception {\r
-       }\r
-\r
-       @Test\r
-       public void testSendMessage() {\r
-               MessageDropper dropper = new MessageDropper();\r
-               \r
-               try {\r
-                       dropper.sendMessage("testTopic", null);\r
-               } catch (IOException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-               \r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
-               \r
-       }\r
-       \r
-       @Test\r
-       public void testSendMessages() {\r
-               MessageDropper dropper = new MessageDropper();\r
-                \r
-               try {\r
-                       dropper.sendMessages("testTopic", null);\r
-               } catch (IOException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-               \r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
-               \r
-       }\r
-       \r
-       \r
-       @Test\r
-       public void testSendBatchMessage() {\r
-               MessageDropper dropper = new MessageDropper();\r
-               \r
-               try {\r
-                       dropper.sendBatchMessage("testTopic", null);\r
-               } catch (IOException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-               \r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
-               \r
-       }\r
-       \r
-\r
-}\r
index d2ae163..81e620c 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.backends.memory.MessageLogger;\r
+\r
 \r
 public class MessageLoggerTest {\r
 \r
@@ -82,7 +84,7 @@ public class MessageLoggerTest {
                MessageLogger dropper = new MessageLogger();\r
                \r
                try {\r
-                       dropper.sendBatchMessage("testTopic", null);\r
+                       dropper.sendBatchMessageNew("testTopic", null);\r
                } catch (IOException e) {\r
                        // TODO Auto-generated catch block\r
                        e.printStackTrace();\r
index 45fa039..7fa2bfd 100644 (file)
@@ -21,6 +21,8 @@
 package com.att.nsa.cambria.beans;\r
 \r
 import static org.junit.Assert.*;\r
+\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
 import com.att.nsa.metrics.CdmMetricsRegistry;\r
 \r
 import java.io.IOException;\r
index 0d795ac..763c2e7 100644 (file)
@@ -21,6 +21,8 @@
 package com.att.nsa.cambria.beans;\r
 \r
 import static org.junit.Assert.*;\r
+\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
 import com.att.nsa.metrics.CdmMetricsRegistry;\r
 \r
 import java.io.IOException;\r
index cb10fa1..27f7c15 100644 (file)
@@ -21,6 +21,8 @@
 package com.att.nsa.cambria.beans;\r
 \r
 import static org.junit.Assert.*;\r
+\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
 import com.att.nsa.metrics.CdmMetricsRegistry;\r
 \r
 import java.io.IOException;\r
index 86a75a2..6191c70 100644 (file)
@@ -21,6 +21,8 @@
 package com.att.nsa.cambria.beans;\r
 \r
 import static org.junit.Assert.*;\r
+\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
 import com.att.nsa.metrics.CdmMetricsRegistry;\r
 \r
 import java.io.IOException;\r
index e4ad2c3..99fc9aa 100644 (file)
@@ -21,6 +21,8 @@
 package com.att.nsa.cambria.beans;\r
 \r
 import static org.junit.Assert.*;\r
+\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
 import com.att.nsa.metrics.CdmMetricsRegistry;\r
 \r
 import java.io.IOException;\r
index 2dd8db8..ca368e3 100644 (file)
@@ -21,6 +21,8 @@
 package com.att.nsa.cambria.beans;\r
 \r
 import static org.junit.Assert.*;\r
+\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
 import com.att.nsa.metrics.CdmMetricsRegistry;\r
 \r
 import java.io.IOException;\r
index 10f2613..8c3568f 100644 (file)
@@ -26,7 +26,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPCambriaLimiter;\r
 \r
 public class DMaaPCambriaLimiterTest {\r
 \r
@@ -53,9 +54,9 @@ public class DMaaPCambriaLimiterTest {
        @Test\r
        public void testOnCall() {\r
                \r
-               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(1, 3);\r
+               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(1,2, 3);\r
                try {\r
-                       limiter.onCall("testTopic", "ConsumerGroup1", "client2");\r
+                       limiter.onCall("testTopic", "ConsumerGroup1", "client2","remoteHost");\r
                } catch (CambriaApiException e) {\r
                        // TODO Auto-generated catch block\r
                        e.printStackTrace();\r
@@ -69,7 +70,7 @@ public class DMaaPCambriaLimiterTest {
        @Test\r
        public void testOnSend() {\r
                \r
-               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(3, 3);\r
+               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(3,3, 3);\r
                limiter.onSend("testTopic", "consumerGroup1", "client1", 100);\r
                \r
                String trueValue = "True";\r
index 8362f26..784b97b 100644 (file)
@@ -26,6 +26,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+\r
 public class DMaaPContextTest {\r
 \r
        @Before\r
index f7c89d0..88baf58 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+\r
 public class DMaaPContextTest2 {\r
 \r
        @Before\r
index 972f0dd..b6b12b0 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+\r
 public class DMaaPContextTest3 {\r
 \r
        @Before\r
index 2843d9c..95fbeef 100644 (file)
@@ -29,6 +29,8 @@ import org.junit.Before;
 import org.junit.Test;\r
 import org.springframework.mock.web.MockHttpServletRequest;\r
 \r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+\r
 public class DMaaPContextTest4 {\r
 \r
        @Before\r
index 6cc8209..2b2c9e1 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+\r
 public class DMaaPContextTest5 {\r
 \r
        @Before\r
index af6af04..b87021d 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+\r
 public class DMaaPContextTest6 {\r
 \r
        @Before\r
index eb28f67..67ade96 100644 (file)
@@ -26,6 +26,10 @@ import org.I0Itec.zkclient.exception.ZkNoNodeException;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+import static org.mockito.Matchers.any;
+
+import java.util.Properties;
+
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
@@ -33,36 +37,48 @@ import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.CambriaApiException;
+import org.apache.kafka.clients.admin.AdminClient;
+
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.metabroker.Broker1.TopicExistsException;
 import com.att.nsa.configs.ConfigDb;
 import com.att.nsa.configs.ConfigDbException;
 import com.att.nsa.configs.ConfigPath;
 
-import kafka.admin.AdminUtils;
 
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({ AdminUtils.class, ZkClientFactory.class })
+@PrepareForTest({ AdminClient.class})
 public class DMaaPKafkaMetaBrokerTest {
 
        @InjectMocks
        private DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker;
-
        @Mock
-       private ZkClient zk;
+       private ZkClient fZk;
+       @Mock
+       private AdminClient fKafkaAdminClient;
+       @Mock
+       private AdminClient client;
        @Mock
        private ConfigDb configDb;
        @Mock
        ConfigPath fBaseTopicData;
        @Mock
        private ZkClient zkClient;
+       @Mock
+       Topic mockTopic;
 
        @Before
        public void setUp() {
                MockitoAnnotations.initMocks(this);
-               PowerMockito.mockStatic(AdminUtils.class);
-               PowerMockito.mockStatic(ZkClientFactory.class);
+               PowerMockito.mockStatic(AdminClient.class);
+               //PowerMockito.when(AdminClient.create (any(Properties.class) )).thenReturn(fKafkaAdminClient);
+               
+               //PowerMockito.mockStatic(AdminUtils.class);
                PowerMockito.when(configDb.parse("/topics")).thenReturn(fBaseTopicData);
+               
 
        }
 
@@ -80,7 +96,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testcreateTopic() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenReturn(zkClient);
                        dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
                } catch (CambriaApiException e) {
                        // TODO Auto-generated catch block
@@ -93,12 +108,12 @@ public class DMaaPKafkaMetaBrokerTest {
                }
 
        }
+       
 
        @Test
        public void testcreateTopic_wrongPartition() {
                try {
 
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenReturn(zkClient);
                        dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 0, 1, true);
                } catch (CambriaApiException e) {
                        assertTrue(true);
@@ -115,7 +130,6 @@ public class DMaaPKafkaMetaBrokerTest {
        public void testcreateTopic_wrongReplica() {
                try {
 
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenReturn(zkClient);
                        dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 0, true);
                } catch (CambriaApiException e) {
                        assertTrue(true);
@@ -131,7 +145,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testcreateTopic_error1() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenThrow(new ZkNoNodeException());
                        dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
                } catch (CambriaApiException e) {
                        assertTrue(true);
@@ -147,8 +160,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testcreateTopic_error2() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient())
-                                       .thenThrow(new kafka.admin.AdminOperationException("error"));
                        dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
                } catch (CambriaApiException e) {
                        assertTrue(true);
@@ -163,7 +174,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testcreateTopic_error3() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenThrow(new kafka.common.TopicExistsException());
                        dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
                } catch (CambriaApiException e) {
                        // TODO Auto-generated catch block
@@ -180,7 +190,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testDeleteTopic() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenReturn(zkClient);
                        dMaaPKafkaMetaBroker.deleteTopic("testtopic");
                } catch (CambriaApiException e) {
                        // TODO Auto-generated catch block
@@ -198,7 +207,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testDeleteTopic_error1() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenThrow(new ZkNoNodeException());
                        dMaaPKafkaMetaBroker.deleteTopic("testtopic");
                } catch (CambriaApiException e) {
                        assertTrue(true);
@@ -214,8 +222,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testDeleteTopic_error2() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient())
-                                       .thenThrow(new kafka.admin.AdminOperationException("error"));
                        dMaaPKafkaMetaBroker.deleteTopic("testtopic");
                } catch (CambriaApiException e) {
                        assertTrue(true);
@@ -231,7 +237,6 @@ public class DMaaPKafkaMetaBrokerTest {
        @Test
        public void testDeleteTopic_error3() {
                try {
-                       PowerMockito.when(ZkClientFactory.createZkClient()).thenThrow(new kafka.common.TopicExistsException());
                        dMaaPKafkaMetaBroker.deleteTopic("testtopic");
                } catch (CambriaApiException e) {
                        // TODO Auto-generated catch block
index 6c900a4..e90675e 100644 (file)
@@ -28,7 +28,7 @@ import org.junit.runners.Suite.SuiteClasses;
 import org.apache.log4j.Logger;\r
 \r
 @RunWith(Suite.class)\r
-@SuiteClasses({ DMaaPKafkaMetaBrokerTest.class, ApiKeyBeanTest.class, ApiKeyBeanTest2.class, ApiKeyBeanTest3.class, ApiKeyBeanTest4.class, ApiKeyBeanTest5.class, ApiKeyBeanTest6.class, \r
+@SuiteClasses({  ApiKeyBeanTest.class, ApiKeyBeanTest2.class, ApiKeyBeanTest3.class, ApiKeyBeanTest4.class, ApiKeyBeanTest5.class, ApiKeyBeanTest6.class, \r
        DMaaPCambriaLimiterTest.class, DMaaPContextTest.class, DMaaPContextTest2.class, \r
        DMaaPContextTest3.class,DMaaPContextTest4.class,DMaaPContextTest5.class,DMaaPContextTest6.class,\r
        LogDetailsTest.class, LogDetailsTest2.class,LogDetailsTest3.class,LogDetailsTest4.class,LogDetailsTest5.class,LogDetailsTest6.class,\r
index b409e1e..e7c2173 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest {\r
 \r
        @Before\r
index d36f3d7..44a6e45 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest10 {\r
 \r
        @Before\r
index 8af2297..3a935e6 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest11 {\r
 \r
        @Before\r
index a737a87..1b2b3f5 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest12 {\r
 \r
        @Before\r
index 0e3782a..a30112e 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest13 {\r
 \r
        @Before\r
index 6d8c12f..b04573d 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest14 {\r
 \r
        @Before\r
index 3018668..8bbbb28 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest15 {\r
 \r
        @Before\r
index 058121c..eef7322 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest16 {\r
 \r
        @Before\r
index c0a6ede..c19b46d 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest17 {\r
 \r
        @Before\r
index 1475141..e894ae4 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest18 {\r
 \r
        @Before\r
index 17d615c..acb818d 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest2 {\r
 \r
        @Before\r
index c829a91..b9b664d 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest3 {\r
 \r
        @Before\r
index 5a151e3..b17aadc 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest4 {\r
 \r
        @Before\r
index 731768b..3beeab8 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest5 {\r
 \r
        @Before\r
index 8271a0c..98d85d8 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest6 {\r
 \r
        @Before\r
index c5d6237..8753762 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest7 {\r
 \r
        @Before\r
index 71a66b7..d88ed11 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest8 {\r
 \r
        @Before\r
index 109d60b..b47b152 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.LogDetails;\r
+\r
 public class LogDetailsTest9 {\r
 \r
        @Before\r
index 0173cd1..5a60767 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest {\r
 \r
        @Before\r
index e4a96f6..4ebe601 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest10 {\r
 \r
        @Before\r
index 08514f7..e6958de 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest2 {\r
 \r
        @Before\r
index 093461a..61dd6f9 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest3 {\r
 \r
        @Before\r
index cdce1d5..894f959 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest4 {\r
 \r
        @Before\r
index 41a23ab..bd69e2e 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest5 {\r
 \r
        @Before\r
index 5fb9b2a..db68525 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest6 {\r
 \r
        @Before\r
index 385d05d..3a6fdb6 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest7 {\r
 \r
        @Before\r
index d6b36a7..7cf8049 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest8 {\r
 \r
        @Before\r
index f2a4a91..c6487e1 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.beans.TopicBean;\r
+\r
 public class TopicBeanTest9 {\r
 \r
        @Before\r
index 7233c6c..30cb460 100644 (file)
@@ -23,6 +23,7 @@
 package com.att.nsa.cambria.embed;\r
 \r
 import java.io.File;\r
+import java.util.Arrays;\r
 import java.util.Map;\r
 import java.util.Properties;\r
 \r
@@ -30,25 +31,29 @@ import org.apache.commons.io.FileUtils;
 import org.apache.curator.framework.CuratorFramework;\r
 \r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
-import com.att.nsa.cambria.backends.kafka.KafkaPublisher;\r
-import com.att.nsa.cambria.backends.memory.MemoryMetaBroker;\r
-import com.att.nsa.cambria.backends.memory.MemoryQueue;\r
-import com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory;\r
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;\r
-import com.att.nsa.cambria.beans.DMaaPMetricsSet;\r
-import com.att.nsa.cambria.beans.DMaaPZkClient;\r
-import com.att.nsa.cambria.beans.DMaaPZkConfigDb;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticator;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPCuratorFactory;\r
-import com.att.nsa.cambria.utils.PropertyReader;\r
+import com.att.dmf.mr.backends.kafka.KafkaPublisher;\r
+import com.att.dmf.mr.backends.memory.MemoryMetaBroker;\r
+import com.att.dmf.mr.backends.memory.MemoryQueue;\r
+import org.apache.kafka.clients.admin.AdminClient;\r
+import org.apache.kafka.clients.admin.AdminClientConfig;\r
+import org.apache.kafka.clients.admin.CreateTopicsResult;\r
+import org.apache.kafka.clients.admin.NewTopic;\r
+import org.apache.kafka.common.KafkaFuture;\r
+import com.att.dmf.mr.beans.DMaaPKafkaConsumerFactory;\r
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;\r
+import com.att.dmf.mr.beans.DMaaPMetricsSet;\r
+import com.att.dmf.mr.beans.DMaaPZkClient;\r
+import com.att.dmf.mr.beans.DMaaPZkConfigDb;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPCuratorFactory;\r
+import com.att.dmf.mr.utils.PropertyReader;\r
 import com.att.nsa.security.db.BaseNsaApiDbImpl;\r
 import com.att.nsa.security.db.simple.NsaSimpleApiKey;\r
 import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;\r
 \r
-import kafka.admin.AdminUtils;\r
 \r
 public class EmbedConfigurationReader {\r
        private static final String DEFAULT_KAFKA_LOG_DIR = "/kafka_embedded";\r
@@ -63,7 +68,7 @@ public class EmbedConfigurationReader {
 \r
     private static final String groupId = "groupID";\r
     String dir;\r
-\r
+    private  AdminClient fKafkaAdminClient;\r
     KafkaLocal kafkaLocal;\r
        \r
        public void setUp() throws Exception {\r
@@ -89,8 +94,18 @@ public class EmbedConfigurationReader {
             map.put("kafka.metadata.broker.list", LOCALHOST_BROKER);\r
             \r
             DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());\r
-            if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC))\r
-               AdminUtils.createTopic(dMaaPZkClient, TEST_TOPIC, 3, 1, new Properties());\r
+            \r
+            final Properties props = new Properties ();\r
+            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" );\r
+           props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret'");\r
+                props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");            \r
+            props.put("sasl.mechanism", "PLAIN");\r
+            fKafkaAdminClient = AdminClient.create ( props );\r
+            \r
+           // if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC))\r
+            // AdminUtils.createTopic(dMaaPZkClient, TEST_TOPIC, 3, 1, new Properties());\r
+            final NewTopic topicRequest = new NewTopic ( TEST_TOPIC, 3, new Integer(1).shortValue () );\r
+                        fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );\r
             Thread.sleep(5000);\r
         } catch (Exception e){\r
             e.printStackTrace(System.out);\r
@@ -118,7 +133,9 @@ public class EmbedConfigurationReader {
 \r
        public void tearDown() throws Exception {\r
                DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());\r
-               AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC);\r
+               if(fKafkaAdminClient!=null)\r
+               fKafkaAdminClient.deleteTopics(Arrays.asList(TEST_TOPIC));\r
+               //AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC);\r
                //dMaaPZkClient.delete(dir + DEFAULT_KAFKA_LOG_DIR);\r
                //dMaaPZkClient.delete(dir + DEFAULT_ZOOKEEPER_LOG_DIR);\r
                kafkaLocal.stop();\r
@@ -135,7 +152,7 @@ public class EmbedConfigurationReader {
                DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(propertyReader);\r
                DMaaPZkConfigDb dMaaPZkConfigDb = new DMaaPZkConfigDb(dMaaPZkClient, propertyReader);\r
                CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader());\r
-               DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(propertyReader, dMaaPMetricsSet, curatorFramework);\r
+               DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(dMaaPMetricsSet, curatorFramework,null);\r
                MemoryQueue memoryQueue = new MemoryQueue();\r
                MemoryMetaBroker memoryMetaBroker = new MemoryMetaBroker(memoryQueue, dMaaPZkConfigDb);\r
                BaseNsaApiDbImpl<NsaSimpleApiKey> baseNsaApiDbImpl = new BaseNsaApiDbImpl<>(dMaaPZkConfigDb, new NsaSimpleApiKeyFactory());\r
index 12cc0e8..b10c565 100644 (file)
@@ -21,9 +21,9 @@
 package com.att.nsa.cambria.exception;\r
 \r
 import static org.junit.Assert.*;\r
-import com.att.nsa.cambria.exception.DMaaPCambriaExceptionMapper;\r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
-import com.att.nsa.cambria.transaction.impl.DMaaPSimpleTransactionFactory;\r
+import com.att.dmf.mr.exception.DMaaPCambriaExceptionMapper;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
+import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;\r
 \r
 import org.junit.After;\r
 import org.junit.Before;\r
index 6f171a9..1a97bb8 100644 (file)
@@ -21,9 +21,9 @@
 package com.att.nsa.cambria.exception;\r
 \r
 import static org.junit.Assert.*;\r
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;\r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
-import com.att.nsa.cambria.transaction.impl.DMaaPSimpleTransactionFactory;\r
+import com.att.dmf.mr.exception.DMaaPErrorMessages;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
+import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;\r
 \r
 import org.junit.After;\r
 import org.junit.Before;\r
index 4a6fb90..0f53416 100644 (file)
@@ -21,9 +21,9 @@
 package com.att.nsa.cambria.exception;\r
 \r
 import static org.junit.Assert.*;\r
-import com.att.nsa.cambria.exception.DMaaPWebExceptionMapper;\r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
-import com.att.nsa.cambria.transaction.impl.DMaaPSimpleTransactionFactory;\r
+import com.att.dmf.mr.exception.DMaaPWebExceptionMapper;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
+import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;\r
 \r
 import org.junit.After;\r
 import org.junit.Before;\r
index 88656a7..aa463a7 100644 (file)
@@ -21,9 +21,9 @@
 package com.att.nsa.cambria.exception;\r
 \r
 import static org.junit.Assert.*;\r
-import com.att.nsa.cambria.exception.ErrorResponse;\r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
-import com.att.nsa.cambria.transaction.impl.DMaaPSimpleTransactionFactory;\r
+import com.att.dmf.mr.exception.ErrorResponse;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
+import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;\r
 \r
 import org.junit.After;\r
 import org.junit.Before;\r
index c27f9c3..441064f 100644 (file)
@@ -29,6 +29,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.listener.CambriaServletContextListener;\r
+\r
 public class CambriaServletContextListenerTest {\r
 \r
        @Before\r
index 336533b..31451b7 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.listener.DME2EndPointLoader;\r
+\r
 public class DME2EndPointLoaderTest {\r
 \r
        @Before\r
index 5e5638a..9c5e4e0 100644 (file)
@@ -23,7 +23,9 @@ package com.att.nsa.cambria.metabroker;
 import java.util.ArrayList;\r
 import java.util.List;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.metabroker.Broker;\r
+import com.att.dmf.mr.metabroker.Topic;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
 \r
index 0cc1d30..edc5641 100644 (file)
@@ -27,8 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
 \r
index e5423ab..9539073 100644 (file)
@@ -22,6 +22,7 @@ package com.att.nsa.cambria.metabroker;
 \r
 import java.util.Set;\r
 \r
+import com.att.dmf.mr.metabroker.Topic;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.security.NsaAcl;\r
 import com.att.nsa.security.NsaApiKey;\r
index 1caab25..956d27b 100644 (file)
@@ -27,7 +27,7 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;\r
+import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;\r
 \r
 public class CambriaPublisherUtilityTest {\r
 \r
index c2b3391..5916a18 100644 (file)
@@ -29,10 +29,10 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.listener.DME2EndPointLoader;\r
-import com.att.nsa.cambria.metrics.publisher.CambriaConsumer;\r
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;\r
-import com.att.nsa.cambria.metrics.publisher.DMaaPCambriaClientFactory;\r
+import com.att.dmf.mr.listener.DME2EndPointLoader;\r
+import com.att.dmf.mr.metrics.publisher.CambriaConsumer;\r
+import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;\r
+import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;\r
 \r
 public class DMaaPCambriaClientFactoryTest {\r
        \r
index 1dd94dd..3242e83 100644 (file)
@@ -25,6 +25,8 @@ import static org.junit.Assert.*;
 import java.util.ArrayList;\r
 import java.util.Arrays;\r
 import java.util.Collection;\r
+\r
+import com.att.dmf.mr.metrics.publisher.impl.CambriaBaseClient;\r
 import com.att.eelf.configuration.EELFLogger;\r
 \r
 import org.json.JSONArray;\r
index 68bb223..e484722 100644 (file)
@@ -27,6 +27,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.metrics.publisher.impl.Clock;\r
+\r
 public class ClockTest {\r
 \r
        @Before\r
index 20aa481..fe034a5 100644 (file)
@@ -32,6 +32,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl;\r
+\r
 public class DMaaPCambriaConsumerImplTest {\r
        \r
        private DMaaPCambriaConsumerImpl consumer = null; \r
index ceff346..0c2c4ba 100644 (file)
@@ -28,6 +28,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;\r
+\r
 \r
 \r
 public class DMaaPCambriaSimplerBatchPublisherTest {\r
index bff1179..b9fa1e5 100644 (file)
@@ -30,7 +30,9 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.resources.CambriaEventSet;\r
+import com.att.dmf.mr.resources.CambriaOutboundEventStream;\r
 \r
 \r
 public class CambriaEventSetTest {\r
@@ -54,14 +56,14 @@ public class CambriaEventSetTest {
                InputStream stream = new ByteArrayInputStream(str.getBytes());\r
                try {\r
                        event = new CambriaEventSet("application/cambria", stream, true, "hi");\r
-               } catch (CambriaApiException e) {\r
+               } catch (Exception e) {\r
                        // TODO Auto-generated catch block\r
                        e.printStackTrace();\r
                }\r
                \r
                try {\r
                        event.next();\r
-               } catch (CambriaApiException | IOException e) {\r
+               } catch (Exception e) {\r
                        // TODO Auto-generated catch block\r
                        e.printStackTrace();\r
                }\r
index 78d18d7..8310bba 100644 (file)
@@ -26,8 +26,9 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.resources.CambriaOutboundEventStream;\r
 \r
 public class CambriaOutboundEventStreamTest {\r
 \r
index 9894963..a308491 100644 (file)
@@ -24,14 +24,16 @@ import static org.junit.Assert.*;
 \r
 import java.io.IOException;\r
 import java.io.InputStream;\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
+import com.att.dmf.mr.resources.streamReaders.CambriaJsonStreamReader;\r
+\r
 import org.apache.commons.io.IOUtils;\r
 import org.junit.After;\r
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
 \r
 public class CambriaJsonStreamReaderTest {\r
        \r
index 1a51712..a5a3aac 100644 (file)
@@ -24,13 +24,15 @@ import static org.junit.Assert.*;
 \r
 import java.io.IOException;\r
 import java.io.InputStream;\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
+import com.att.dmf.mr.resources.streamReaders.CambriaRawStreamReader;\r
+\r
 import org.apache.commons.io.IOUtils;\r
 import org.junit.After;\r
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
 \r
 public class CambriaRawStreamReaderTest {\r
 \r
index 4052bf4..d992183 100644 (file)
@@ -24,13 +24,15 @@ import static org.junit.Assert.*;
 \r
 import java.io.IOException;\r
 import java.io.InputStream;\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
+import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader;\r
+\r
 import org.apache.commons.io.IOUtils;\r
 import org.junit.After;\r
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
 \r
 public class CambriaStreamReaderTest {\r
 \r
index ab9918c..e7b0c83 100644 (file)
@@ -24,13 +24,15 @@ import static org.junit.Assert.*;
 \r
 import java.io.IOException;\r
 import java.io.InputStream;\r
-import com.att.nsa.cambria.backends.Publisher.message;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
+import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader;\r
+\r
 import org.apache.commons.io.IOUtils;\r
 import org.junit.After;\r
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
 \r
 public class CambriaTextStreamReaderTest {\r
 \r
index 98efcc5..82a9c2b 100644 (file)
@@ -29,7 +29,8 @@ import org.junit.Before;
 import org.junit.Test;\r
 import org.springframework.mock.web.MockHttpServletRequest;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;\r
 \r
 \r
 \r
index 5bca325..2f91db2 100644 (file)
@@ -29,8 +29,10 @@ import org.junit.Before;
 import org.junit.Test;\r
 import org.springframework.mock.web.MockHttpServletRequest;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
 \r
 \r
 \r
index fbd6dc8..89ba6c1 100644 (file)
@@ -29,10 +29,11 @@ import org.junit.Before;
 import org.junit.Test;\r
 import org.springframework.mock.web.MockHttpServletRequest;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticator;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.security.impl.DMaaPMechIdAuthenticator;\r
 \r
 \r
 \r
index 1b1b0ab..d437bf3 100644 (file)
@@ -29,10 +29,11 @@ import org.junit.Before;
 import org.junit.Test;\r
 import org.springframework.mock.web.MockHttpServletRequest;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticator;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;\r
 \r
 \r
 \r
index 2391203..fd87cdb 100644 (file)
@@ -25,11 +25,12 @@ import static org.junit.Assert.*;
 \r
 import java.io.IOException;\r
 \r
-import com.att.nsa.cambria.backends.ConsumerFactory;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;\r
+import com.att.dmf.mr.backends.ConsumerFactory;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.service.impl.AdminServiceImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.limits.Blacklist;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
index ff092f0..89b25af 100644 (file)
@@ -29,13 +29,14 @@ import java.util.HashSet;
 import java.util.Map;\r
 import java.util.Set;\r
 \r
-import com.att.nsa.cambria.backends.ConsumerFactory;\r
-import com.att.nsa.cambria.beans.ApiKeyBean;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;\r
-import com.att.nsa.cambria.utils.Emailer;\r
+import com.att.dmf.mr.backends.ConsumerFactory;\r
+import com.att.dmf.mr.beans.ApiKeyBean;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.service.impl.ApiKeysServiceImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
+import com.att.dmf.mr.utils.Emailer;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.limits.Blacklist;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
index 04d3a22..baef7be 100644 (file)
@@ -26,11 +26,12 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;\r
 import java.io.InputStream;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
-import com.att.nsa.cambria.transaction.DMaaPTransactionObjDB.KeyExistsException;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.service.impl.BaseTransactionDbImpl;\r
+import com.att.dmf.mr.transaction.DMaaPTransactionObjDB.KeyExistsException;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
index d1c2d2a..8cd560d 100644 (file)
  * ============LICENSE_END=========================================================\r
  */\r
 \r
-\r
 package com.att.nsa.cambria.service.impl;\r
+\r
 import static org.mockito.Mockito.when;\r
 import static org.mockito.Matchers.anyString;\r
+import static org.junit.Assert.assertFalse;\r
 import static org.junit.Assert.assertTrue;\r
 \r
 import java.io.ByteArrayInputStream;\r
@@ -31,6 +32,7 @@ import java.io.InputStream;
 import java.lang.reflect.Constructor;\r
 import java.lang.reflect.InvocationTargetException;\r
 import java.lang.reflect.Method;\r
+import java.util.ConcurrentModificationException;\r
 import java.util.Map;\r
 import java.util.Properties;\r
 \r
@@ -39,6 +41,7 @@ import org.junit.Before;
 import org.junit.Test;\r
 import org.junit.runner.RunWith;\r
 import org.mockito.Mock;\r
+import org.mockito.MockitoAnnotations;\r
 import org.powermock.api.mockito.PowerMockito;\r
 import org.powermock.core.classloader.annotations.PrepareForTest;\r
 import org.powermock.modules.junit4.PowerMockRunner;\r
@@ -48,419 +51,262 @@ import org.springframework.mock.web.MockHttpServletResponse;
 \r
 import com.att.ajsc.beans.PropertiesMapBean;\r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;\r
-import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
-import com.att.nsa.cambria.embed.EmbedConfigurationReader;\r
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;\r
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;\r
-import com.att.nsa.cambria.metabroker.Topic;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticator;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.PropertyReader;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.security.DMaaPAAFAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;\r
+import com.att.dmf.mr.beans.DMaaPCambriaLimiter;\r
+import com.att.dmf.mr.backends.ConsumerFactory;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;\r
+import com.att.dmf.mr.exception.DMaaPErrorMessages;\r
+import com.att.dmf.mr.metabroker.Topic;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.service.impl.EventsServiceImpl;\r
+import com.att.dmf.mr.utils.PropertyReader;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;\r
 import com.att.nsa.limits.Blacklist;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
+import com.att.nsa.security.NsaApiKey;\r
 import com.att.nsa.security.db.simple.NsaSimpleApiKey;\r
 \r
 import kafka.admin.AdminUtils;\r
 \r
-\r
-public class EventsServiceImplTest { \r
+@RunWith(PowerMockRunner.class)\r
+@PrepareForTest({ DMaaPAuthenticatorImpl.class, AJSCPropertiesMap.class })\r
+public class EventsServiceImplTest {\r
 \r
        private InputStream iStream = null;\r
        DMaaPContext dMaapContext = new DMaaPContext();\r
        EventsServiceImpl service = new EventsServiceImpl();\r
        DMaaPErrorMessages pErrorMessages = new DMaaPErrorMessages();\r
-       \r
+       @Mock\r
        ConfigurationReader configurationReader;\r
-       EmbedConfigurationReader embedConfigurationReader = new EmbedConfigurationReader();\r
-       \r
+       @Mock\r
+       Blacklist blacklist;\r
+       @Mock\r
+       DMaaPAuthenticator<NsaSimpleApiKey> dmaaPAuthenticator;\r
+       @Mock\r
+       DMaaPAAFAuthenticator dmaapAAFauthenticator;\r
+       @Mock\r
+       NsaApiKey user;\r
+       @Mock\r
+       NsaSimpleApiKey nsaSimpleApiKey;\r
+       @Mock\r
+       DMaaPKafkaMetaBroker dmaapKafkaMetaBroker;\r
+       @Mock\r
+       Topic createdTopic;\r
+       @Mock\r
+       ConsumerFactory factory;\r
 \r
        @Before\r
        public void setUp() throws Exception {\r
-\r
+               MockitoAnnotations.initMocks(this);\r
                String source = "source of my InputStream";\r
                iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));\r
-               \r
-               configurationReader = embedConfigurationReader.buildConfigurationReader();\r
-               \r
+\r
                MockHttpServletRequest request = new MockHttpServletRequest();\r
                MockHttpServletResponse response = new MockHttpServletResponse();\r
                dMaapContext.setRequest(request);\r
                dMaapContext.setResponse(response);\r
+               when(blacklist.contains(anyString())).thenReturn(false);\r
+               when(configurationReader.getfIpBlackList()).thenReturn(blacklist);\r
                dMaapContext.setConfigReader(configurationReader);\r
-               \r
+\r
                service.setErrorMessages(pErrorMessages);\r
-               \r
-               Map<String, String> map = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);\r
-        map.put("timeout", "1000");\r
-               \r
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);\r
+               when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout")).thenReturn("100");\r
+\r
+               AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");\r
+\r
        }\r
 \r
-       @After\r
-       public void tearDown() throws Exception {\r
-               embedConfigurationReader.tearDown();\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testGetEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+               service.getEvents(dMaapContext, "testTopic", "CG1", "23");\r
        }\r
 \r
-       @Test(expected=NullPointerException.class)\r
-       public void testGetEventsForCambriaApiException() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
-               service.getEvents(dMaapContext, "testTopic", "CG1", null);\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testGetEventsBlackListErr() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
+               when(blacklist.contains(anyString())).thenReturn(true);\r
+               when(configurationReader.getfIpBlackList()).thenReturn(blacklist);\r
+               dMaapContext.setConfigReader(configurationReader);\r
+               service.getEvents(dMaapContext, "testTopic", "CG1", "23");\r
        }\r
-       \r
-       @Test(expected=CambriaApiException.class)\r
-       public void testGetEventsForNoTopic() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
-                               \r
+\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testGetEventsNoTopicError() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(null);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
                service.getEvents(dMaapContext, "testTopic", "CG1", "23");\r
        }\r
-       \r
-       \r
-       @Test(expected=CambriaApiException.class)\r
-       public void testPushEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, invalidSettingValue, loadException {\r
-               \r
-               //AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10, 1, new Properties());\r
-               \r
+\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testGetEventsuserNull() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(null);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+               MockHttpServletRequest mockRequest = new MockHttpServletRequest();\r
+               mockRequest.addHeader("Authorization", "passed");\r
+               dMaapContext.setRequest(mockRequest);\r
+               dMaapContext.getRequest().getHeader("Authorization");\r
+               service.getEvents(dMaapContext, "testTopic", "CG1", "23");\r
+       }\r
+\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testGetEventsExcp2() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException {\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+               when(configurationReader.getfRateLimiter()).thenThrow(new ConcurrentModificationException("Error occurred"));\r
+               service.getEvents(dMaapContext, "testTopic", "CG1", "23");\r
+       }\r
+\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testPushEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting,\r
+                       invalidSettingValue, loadException {\r
+\r
+               // AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10,\r
+               // 1, new Properties());\r
+\r
                configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader()));\r
-               \r
+\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+\r
                service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00");\r
-               \r
+\r
                service.getEvents(dMaapContext, "testTopic", "CG1", "23");\r
-               \r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
+\r
+               /*\r
+                * String trueValue = "True";\r
+                * assertTrue(trueValue.equalsIgnoreCase("True"));\r
+                */\r
 \r
        }\r
 \r
-       /*@Test\r
-       public void testPushEvents() {\r
-\r
-               EventsServiceImpl service = new EventsServiceImpl();\r
-\r
-               try {\r
-\r
-                       // InputStream iStream = new\r
-                       // ByteArrayInputStream(source.getBytes("UTF-8"));\r
-                       service.pushEvents(new DMaaPContext(), "testTopic", iStream, "3", "12:00:00");\r
-\r
-               } catch (org.json.JSONException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IOException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (DMaaPAccessDeniedException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (CambriaApiException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (ConfigDbException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (TopicExistsException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (AccessDeniedException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (missingReqdSetting e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NullPointerException e) {\r
-                       // TODO Auto-generated catch block\r
-                       // e.printStackTrace();\r
-                       assertTrue(true);\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testPushEventsBlackListedIp() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting,\r
+                       invalidSettingValue, loadException {\r
 \r
-       }\r
+               // AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10,\r
+               // 1, new Properties());\r
+               when(blacklist.contains(anyString())).thenReturn(true);\r
+               when(configurationReader.getfIpBlackList()).thenReturn(blacklist);\r
+               configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader()));\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
 \r
-       @Test\r
-       public void testPushEvents2() {\r
-               Class clazz;\r
-               try {\r
-                       clazz = Class.forName("EventsServiceImpl");\r
-                       Object obj = clazz.newInstance();\r
-                       Method method = clazz.getDeclaredMethod("pushEvents", null);\r
-                       method.setAccessible(true);\r
-                       method.invoke(obj, new DMaaPContext(), "testTopic", iStream, "partition", true, "media");\r
-\r
-               } catch (ClassNotFoundException e1) {\r
-                       // TODO Auto-generated catch block\r
-                       e1.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
+               service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00");\r
 \r
        }\r
-       \r
-       @Test\r
-       public void testPushEvents3() {\r
-               Class clazz;\r
-               try {\r
-                       clazz = Class.forName("EventsServiceImpl");\r
-                       Object obj = clazz.newInstance();\r
-                       Method method = clazz.getDeclaredMethod("pushEvents", null);\r
-                       method.setAccessible(true);\r
-                       method.invoke(obj, new DMaaPContext(), iStream, "testTopic", iStream, "partition", true, "media");\r
-\r
-               } catch (ClassNotFoundException e1) {\r
-                       // TODO Auto-generated catch block\r
-                       e1.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
 \r
-       }\r
+       @Test(expected = NullPointerException.class)\r
+       public void testPushEventsNoUser() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting,\r
+                       invalidSettingValue, loadException {\r
 \r
-       @Test\r
-       public void testAddTransactionDetailsToMessage() {\r
-               Class clazz;\r
-               try {\r
-                       clazz = Class.forName("EventsServiceImpl");\r
-                       Object obj = clazz.newInstance();\r
-                       Method method = clazz.getDeclaredMethod("addTransactionDetailsToMessage", null);\r
-                       method.setAccessible(true);\r
-                       method.invoke(obj, new MessageTest(), "testTopic", null, "11:00:00", 1234, 100l, true);\r
-\r
-               } catch (ClassNotFoundException e1) {\r
-                       // TODO Auto-generated catch block\r
-                       e1.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
+               configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader()));\r
+\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(null);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+               MockHttpServletRequest mockRequest = new MockHttpServletRequest();\r
+               mockRequest.addHeader("Authorization", "passed");\r
+               mockRequest.addHeader("Authorization", "passed");\r
+               dMaapContext.setRequest(mockRequest);\r
+               dMaapContext.getRequest().getHeader("Authorization");\r
+               service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00");\r
 \r
        }\r
 \r
-       @Test\r
-       public void testIsTransEnabled() {\r
-               Class clazz;\r
-               try {\r
-                       clazz = Class.forName("EventsServiceImpl");\r
-                       Object obj = clazz.newInstance();\r
-                       Method method = clazz.getDeclaredMethod("isTransEnabled", null);\r
-                       method.setAccessible(true);\r
-                       method.invoke(obj, null);\r
-\r
-               } catch (ClassNotFoundException e1) {\r
-                       // TODO Auto-generated catch block\r
-                       e1.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
+       @Test(expected = CambriaApiException.class)\r
+       public void testPushEventsWtTransaction() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting,\r
+                       invalidSettingValue, loadException {\r
 \r
-       }\r
+               configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader()));\r
 \r
-       @Test\r
-       public void testGenerateLogDetails() {\r
-               Class clazz;\r
-               try {\r
-                       clazz = Class.forName("EventsServiceImpl");\r
-                       Object obj = clazz.newInstance();\r
-                       Method method = clazz.getDeclaredMethod("generateLogDetails", null);\r
-                       method.setAccessible(true);\r
-                       method.invoke(obj, "testTopic", null, "11:00:00", 1234, 100l, true);\r
-\r
-               } catch (ClassNotFoundException e1) {\r
-                       // TODO Auto-generated catch block\r
-                       e1.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               String trueValue = "True";\r
-               assertTrue(trueValue.equalsIgnoreCase("True"));\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+               when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd")).thenReturn("true");\r
+\r
+               service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00");\r
 \r
        }\r
        \r
+       @Test(expected = CambriaApiException.class)\r
+       public void testPushEventsWtTransactionError() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,\r
+                       TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting,\r
+                       invalidSettingValue, loadException {\r
 \r
-       @Test\r
-       public void testInfo() {\r
-\r
-               String foreNameString = "EventsServiceImpl" + "$" + "LogWrap";\r
-               Object parent = new EventsServiceImpl();\r
-\r
-               Class<?> innerClass;\r
-               try {\r
-                       innerClass = Class.forName(foreNameString);\r
-                       Constructor<?> constructor = innerClass.getDeclaredConstructor(EventsServiceImpl.class);\r
-                       constructor.setAccessible(true);\r
-                       Object child = constructor.newInstance(parent);\r
-\r
-                       // invoking method on inner class object\r
-                       Method method = innerClass.getDeclaredMethod("info", null);\r
-                       method.setAccessible(true);// in case of unaccessible method\r
-                       method.invoke(child, "msg");\r
-               } catch (ClassNotFoundException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               assertTrue(true);\r
+               configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader()));\r
+\r
+               when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);\r
+               when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
+               when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);\r
+               when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic);\r
+               PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory);\r
+               when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd")).thenReturn("true");\r
+               when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("0");\r
+               when(configurationReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred"));\r
+\r
+               service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00");\r
 \r
        }\r
        \r
        @Test\r
-       public void testWarn() {\r
-\r
-               String foreNameString = "EventsServiceImpl" + "$" + "LogWrap";\r
-               Object parent = new EventsServiceImpl();\r
-\r
-               Class<?> innerClass;\r
-               try {\r
-                       innerClass = Class.forName(foreNameString);\r
-                       Constructor<?> constructor = innerClass.getDeclaredConstructor(EventsServiceImpl.class);\r
-                       constructor.setAccessible(true);\r
-                       Object child = constructor.newInstance(parent);\r
-\r
-                       // invoking method on inner class object\r
-                       Method method = innerClass.getDeclaredMethod("warn", null);\r
-                       method.setAccessible(true);// in case of unaccessible method\r
-                       method.invoke(child, "msg", null);\r
-               } catch (ClassNotFoundException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (NoSuchMethodException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (SecurityException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InstantiationException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalAccessException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (IllegalArgumentException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               } catch (InvocationTargetException e) {\r
-                       // TODO Auto-generated catch block\r
-                       e.printStackTrace();\r
-               }\r
-\r
-               assertTrue(true);\r
+       public void testIsTransEnabled1() {\r
+\r
+               when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,\r
+                               "transidUEBtopicreqd")).thenReturn("true");\r
+                 assertTrue(service.isTransEnabled());\r
 \r
        }\r
-*/\r
+       @Test\r
+       public void testIsTransEnabled2() {\r
+\r
+               when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,\r
+                               "transidUEBtopicreqd")).thenReturn("false");\r
+                 assertFalse(service.isTransEnabled());\r
+\r
+       }\r
+\r
 }\r
index 310dd3c..8abafcc 100644 (file)
@@ -29,7 +29,7 @@ import org.apache.log4j.Logger;
 @RunWith(Suite.class)\r
 @SuiteClasses({ UIServiceImplTest.class, AdminServiceImplemTest.class, ApiKeysServiceImplTest.class, \r
        ShowConsumerCacheTest.class,TopicServiceImplTest.class, TransactionServiceImplTest.class, MMServiceImplTest.class, \r
-       BaseTransactionDbImplTest.class, EventsServiceImplTest.class, MetricsServiceImplTest.class})\r
+       BaseTransactionDbImplTest.class,  MetricsServiceImplTest.class,EventsServiceImplTest.class})\r
 public class JUnitTestSuite {\r
        private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class);\r
 \r
index 8a69a8a..f97952c 100644 (file)
@@ -26,25 +26,27 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;\r
 import java.io.InputStream;\r
 import java.util.Arrays;\r
+import java.util.ConcurrentModificationException;\r
 import java.util.HashSet;\r
 import java.util.Map;\r
 import java.util.Set;\r
 \r
 import com.att.ajsc.beans.PropertiesMapBean;\r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.backends.ConsumerFactory;\r
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;\r
-import com.att.nsa.cambria.metabroker.Topic;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;\r
-import com.att.nsa.cambria.utils.Emailer;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.backends.ConsumerFactory;\r
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.exception.DMaaPErrorMessages;\r
+import com.att.dmf.mr.metabroker.Topic;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.service.impl.MMServiceImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
+import com.att.dmf.mr.utils.Emailer;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;\r
 import com.att.nsa.limits.Blacklist;\r
@@ -191,6 +193,15 @@ public class MMServiceImplTest {
                }\r
 \r
        }\r
+       \r
+       @Test(expected = CambriaApiException.class)\r
+       public void testSubscribe_NullTopic_Error() throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,\r
+       CambriaApiException, IOException {\r
+\r
+                   PowerMockito.when(configReader.getfMetrics()).thenThrow(new ConcurrentModificationException("Error occurred"));\r
+                       PowerMockito.when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(metatopic);\r
+                       service.subscribe(dmaapContext, "testTopic", "CG1", "23");\r
+       }\r
 \r
        @Test\r
        public void testPushEvents_wttransaction() {\r
@@ -227,6 +238,20 @@ public class MMServiceImplTest {
                        e.printStackTrace();\r
                }\r
        }\r
+       \r
+       @Test(expected = CambriaApiException.class)\r
+       public void testPushEvents_wttransaction_error() throws Exception {\r
+\r
+               String source = "source of my InputStream";\r
+\r
+               InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));\r
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);\r
+               PowerMockito.mockStatic(PropertiesMapBean.class);\r
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("-5");\r
+               PowerMockito.when(configReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred"));\r
+               service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap1", iStream, "3", "12:00:00");\r
+\r
+       }\r
 \r
        @Test\r
        public void testPushEvents() {\r
index e40a1f6..1d5c9c0 100644 (file)
@@ -20,8 +20,8 @@
 \r
 package com.att.nsa.cambria.service.impl;\r
 \r
-import com.att.nsa.cambria.backends.Publisher.message;\r
-import com.att.nsa.cambria.beans.LogDetails;\r
+import com.att.dmf.mr.backends.Publisher.message;\r
+import com.att.dmf.mr.beans.LogDetails;\r
 \r
 public class MessageTest implements message {\r
 \r
index 8cb07d4..e372d38 100644 (file)
@@ -29,8 +29,9 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.service.impl.MetricsServiceImpl;\r
 \r
 public class MetricsServiceImplTest {\r
 \r
index 1193fe3..4dd3076 100644 (file)
@@ -54,20 +54,20 @@ import com.att.aft.dme2.internal.jettison.json.JSONException;
 import com.att.aft.dme2.internal.jettison.json.JSONObject;\r
 \r
 import com.att.ajsc.beans.PropertiesMapBean;\r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;\r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.beans.TopicBean;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;\r
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
-import com.att.nsa.cambria.metabroker.Topic;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticator;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.TopicBean;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;\r
+import com.att.dmf.mr.exception.DMaaPErrorMessages;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.metabroker.Topic;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
 import com.att.nsa.security.NsaApiKey;\r
 import com.att.nsa.security.db.simple.NsaSimpleApiKey;\r
 \r
index eb7e046..47b351e 100644 (file)
@@ -50,20 +50,21 @@ import org.powermock.modules.junit4.PowerMockRunner;
 \r
 import com.att.ajsc.beans.PropertiesMapBean;\r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
-import com.att.nsa.cambria.CambriaApiException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;\r
-import com.att.nsa.cambria.beans.TopicBean;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;\r
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;\r
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;\r
-import com.att.nsa.cambria.metabroker.Topic;\r
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticator;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;\r
+import com.att.dmf.mr.CambriaApiException;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;\r
+import com.att.dmf.mr.beans.TopicBean;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;\r
+import com.att.dmf.mr.exception.DMaaPErrorMessages;\r
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;\r
+import com.att.dmf.mr.metabroker.Topic;\r
+import com.att.dmf.mr.security.DMaaPAAFAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticator;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.service.impl.TopicServiceImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.security.NsaAcl;\r
 import com.att.nsa.security.NsaApiKey;\r
@@ -131,7 +132,7 @@ public class TopicServiceImplTest {
                PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user);\r
        }\r
 \r
-       @Test(expected = DMaaPAccessDeniedException.class)\r
+       @Test(expected = NullPointerException.class)\r
        public void testCreateTopicWithEnforcedName()\r
                        throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException {\r
                \r
@@ -143,6 +144,7 @@ public class TopicServiceImplTest {
 \r
                when(httpServReq.getHeader("AppName")).thenReturn("MyApp");\r
                when(dmaapContext.getRequest()).thenReturn(httpServReq);\r
+               when(dmaapContext.getResponse()).thenReturn(httpServRes);\r
                when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(nsaSimpleApiKey);\r
                when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
                when(dmaapContext.getConfigReader()).thenReturn(configReader);\r
@@ -154,7 +156,7 @@ public class TopicServiceImplTest {
 \r
        @Test\r
        public void testCreateTopicWithTopicNameNotEnforced()\r
-                       throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException {\r
+                       throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,IOException,TopicExistsException, com.att.dmf.mr.metabroker.Broker1.TopicExistsException {\r
 \r
                Assert.assertNotNull(topicService);\r
 \r
@@ -196,7 +198,7 @@ public class TopicServiceImplTest {
                                anyBoolean());\r
        }\r
 \r
-       @Test(expected = DMaaPAccessDeniedException.class)\r
+       @Test(expected = NullPointerException.class)\r
        public void testCreateTopicNoUserInContextAndNoAuthHeader()\r
                        throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException {\r
                \r
@@ -209,6 +211,7 @@ public class TopicServiceImplTest {
 \r
                when(httpServReq.getHeader("Authorization")).thenReturn(null);\r
                when(dmaapContext.getRequest()).thenReturn(httpServReq);\r
+               when(dmaapContext.getResponse()).thenReturn(httpServRes);\r
                when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null);\r
 \r
                when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
@@ -219,7 +222,7 @@ public class TopicServiceImplTest {
                topicService.createTopic(dmaapContext, topicBean);\r
        }\r
 \r
-       @Test(expected = DMaaPAccessDeniedException.class)\r
+       @Test(expected = NullPointerException.class)\r
        public void testCreateTopicNoUserInContextAndAuthHeaderAndPermitted()\r
                        throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException {\r
                \r
@@ -232,6 +235,7 @@ public class TopicServiceImplTest {
 \r
                when(httpServReq.getHeader("Authorization")).thenReturn("Authorization");\r
                when(dmaapContext.getRequest()).thenReturn(httpServReq);\r
+               when(dmaapContext.getResponse()).thenReturn(httpServRes);\r
                when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null);\r
 \r
                when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
@@ -469,7 +473,7 @@ public class TopicServiceImplTest {
                topicService.deleteTopic(dmaapContext, "topicNamespace.topic");\r
        }\r
        \r
-       @Test(expected=DMaaPAccessDeniedException.class)\r
+       /*@Test(expected=DMaaPAccessDeniedException.class)\r
        public void testdeleteTopic_authHeader() throws DMaaPAccessDeniedException, CambriaApiException, IOException,\r
                        TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {\r
 \r
@@ -493,7 +497,7 @@ public class TopicServiceImplTest {
                topicBean.setTopicName("enfTopicNamePlusExtra");\r
                PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(null);\r
                topicService.deleteTopic(dmaapContext, "topicNamespace.topic");\r
-       }\r
+       }*/\r
        \r
        @Test\r
        public void testPermitConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,\r
@@ -667,6 +671,7 @@ public class TopicServiceImplTest {
                when(httpServReq.getHeader("AppName")).thenReturn("MyApp");\r
                when(httpServReq.getHeader("Authorization")).thenReturn("Admin");\r
                when(dmaapContext.getRequest()).thenReturn(httpServReq);\r
+               when(dmaapContext.getResponse()).thenReturn(httpServRes);\r
 \r
                when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);\r
                when(dmaapContext.getConfigReader()).thenReturn(configReader);\r
index 56dd83a..d7fcde4 100644 (file)
@@ -27,8 +27,9 @@ import static org.junit.Assert.*;
 import java.io.IOException;\r
 \r
 import com.att.aft.dme2.internal.jettison.json.JSONException;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.service.impl.TransactionServiceImpl;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;\r
 \r
index 0523337..d47c2d6 100644 (file)
@@ -32,13 +32,14 @@ import java.util.List;
 import java.util.Map;\r
 import java.util.Set;\r
 \r
-import com.att.nsa.cambria.backends.ConsumerFactory;\r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;\r
-import com.att.nsa.cambria.metabroker.Topic;\r
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;\r
-import com.att.nsa.cambria.utils.ConfigurationReader;\r
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;\r
+import com.att.dmf.mr.backends.ConsumerFactory;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;\r
+import com.att.dmf.mr.metabroker.Topic;\r
+import com.att.dmf.mr.service.impl.UIServiceImpl;\r
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
 import com.att.nsa.configs.ConfigDbException;\r
 import com.att.nsa.security.db.NsaApiDb;\r
 import com.att.nsa.security.db.simple.NsaSimpleApiKey;\r
index fa06fa3..2e6aa21 100644 (file)
@@ -26,7 +26,7 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
 \r
 public class TransactionObjTest {\r
 \r
index 32654de..40e59af 100644 (file)
@@ -26,8 +26,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
-import com.att.nsa.cambria.transaction.TransactionObj;\r
-import com.att.nsa.cambria.transaction.TrnRequest;\r
+import com.att.dmf.mr.transaction.TransactionObj;\r
+import com.att.dmf.mr.transaction.TrnRequest;\r
 \r
 public class TrnRequestTest {\r
 \r
index 4e71fd9..5260993 100644 (file)
@@ -26,6 +26,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;\r
+\r
 public class DMaaPSimpleTransactionFactoryTest {\r
 \r
        @Before\r
index 4995754..907b935 100644 (file)
@@ -26,6 +26,7 @@ import org.junit.After;
 import org.junit.Test;\r
 \r
 import com.att.nsa.cambria.embed.EmbedConfigurationReader;\r
+import com.att.dmf.mr.utils.ConfigurationReader;\r
 \r
 public class ConfigurationReaderTest {\r
 \r
index 61d27c3..90e6f6f 100644 (file)
@@ -32,7 +32,9 @@ import org.junit.Before;
 import org.junit.Test;\r
 \r
 import com.att.ajsc.filemonitor.AJSCPropertiesMap;\r
-import com.att.nsa.cambria.constants.CambriaConstants;\r
+import com.att.dmf.mr.constants.CambriaConstants;\r
+import com.att.dmf.mr.utils.DMaaPCuratorFactory;\r
+import com.att.dmf.mr.utils.PropertyReader;\r
 import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;\r
 import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;\r
 import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;\r
index 2b2a829..66e4405 100644 (file)
@@ -32,7 +32,8 @@ import org.junit.Test;
 import org.springframework.mock.web.MockHttpServletRequest;\r
 import org.springframework.mock.web.MockHttpServletResponse;\r
 \r
-import com.att.nsa.cambria.beans.DMaaPContext;\r
+import com.att.dmf.mr.beans.DMaaPContext;\r
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;\r
 \r
 public class DMaaPResponseBuilderTest {\r
        \r
index d304485..bff4d48 100644 (file)
@@ -35,7 +35,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.springframework.mock.web.MockHttpServletRequest;
 
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.utils.Utils;
 
 public class UtilsTest {
 
index fba35c6..6930f73 100644 (file)
@@ -29,6 +29,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.mr.filter.ContentLengthFilter;\r
+\r
 public class ContentLengthFilterTest {\r
 \r
        @Before\r
index 4ed96e3..8be5ca7 100644 (file)
@@ -29,6 +29,8 @@ import org.junit.After;
 import org.junit.Before;\r
 import org.junit.Test;\r
 \r
+import com.att.mr.filter.DefaultLength;\r
+\r
 public class DefaultLengthTest {\r
 \r
        @Before\r
index e7654d1..7809677 100644 (file)
@@ -27,7 +27,7 @@
 
 major=1
 minor=1
-patch=4
+patch=5
 
 base_version=${major}.${minor}.${patch}