DMAAP-MR - Merge MR repos 48/121248/6
authordavid.mcweeney <david.mcweeney@est.tech>
Thu, 6 May 2021 15:49:48 +0000 (16:49 +0100)
committerdavid.mcweeney <david.mcweeney@est.tech>
Thu, 24 Jun 2021 08:57:54 +0000 (09:57 +0100)
Issue-ID: DMAAP-1582
Signed-off-by: david.mcweeney <david.mcweeney@est.tech>
Change-Id: I727cffc687a7ea79abbd858affafc823c0f96f0b

289 files changed:
.gitignore [new file with mode: 0644]
docs/.gitignore [deleted file]
etc/cambriaApi.properties [new file with mode: 0644]
etc/cambriaApi_template.properties [new file with mode: 0644]
etc/log4j2.xml [new file with mode: 0644]
etc/log4j2_template.xml [new file with mode: 0644]
etc/logstash_cambria_template.conf [new file with mode: 0644]
pom.xml
src/main/java/org/onap/dmaap/DMaaPCambriaExceptionMapper.java
src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/Clock.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/ErrorResponseProvider.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java [new file with mode: 0644]
src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java [new file with mode: 0644]
src/main/resources/DMaaPUrl.properties [new file with mode: 0644]
src/main/resources/application.properties [new file with mode: 0644]
src/main/resources/cambriaApiVersion.properties [new file with mode: 0644]
src/main/resources/dme2testcase.properties [new file with mode: 0644]
src/main/resources/endpoint.properties [new file with mode: 0644]
src/main/resources/images/attLogo.gif [new file with mode: 0644]
src/main/resources/images/att_vt_1cp_grd_rev.gif [new file with mode: 0644]
src/main/resources/kafka.properties [new file with mode: 0644]
src/main/resources/templates/hello.html [new file with mode: 0644]
src/test/java/org/onap/dmaap/DMaaPWebExceptionMapperTest.java
src/test/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/CambriaRateLimiterTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/Kafka011ConsumerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaLiveLockAvoider2Test.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaConsumerFactoryTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/utils/EMailerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/filter/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java [new file with mode: 0644]
src/test/java/org/onap/dmaap/service/TopicRestServiceTest.java
src/test/resources/DMaaPErrorMesaages.properties [new file with mode: 0644]
src/test/resources/MsgRtrApi.properties [new file with mode: 0644]
src/test/resources/spring-context.xml [new file with mode: 0644]
version.properties

diff --git a/.gitignore b/.gitignore
new file mode 100644 (file)
index 0000000..97e81c5
--- /dev/null
@@ -0,0 +1,9 @@
+/.tox
+/_build/*
+/__pycache__/*
+*.jsonld
+/dme2-fs-registry/*
+*.iml
+/target/*
+.idea/*
+/src/.idea/*
\ No newline at end of file
diff --git a/docs/.gitignore b/docs/.gitignore
deleted file mode 100644 (file)
index ed44ee3..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-/.tox
-/_build/*
-/__pycache__/*
-*.jsonld
\ No newline at end of file
diff --git a/etc/cambriaApi.properties b/etc/cambriaApi.properties
new file mode 100644 (file)
index 0000000..b3a2b68
--- /dev/null
@@ -0,0 +1,138 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+###############################################################################
+##
+## Cambria API Server config
+##
+##     - Default values are shown as commented settings.
+##
+
+###############################################################################
+##
+## HTTP service
+##
+##             - 3904 is standard as of 7/29/14.
+##             - At this time, Cambria always binds to 0.0.0.0
+##
+#cambria.service.port=3904
+#tomcat.maxthreads=(tomcat default, which is usually 200)
+
+###############################################################################
+##
+## Broker Type
+##
+##     The Cambria server can run either as a memory-only implementation, meant
+##     for testing, or against Kafka. For a memory-only server, use "memory" for
+##     the broker.type setting.
+##
+#broker.type=kafka
+
+###############################################################################
+##
+## Zookeeper Connection
+##
+##     Both Cambria and Kafka make use of Zookeeper.
+##
+#config.zk.servers=localhost
+#config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+##     Items below are passed through to Kafka's producer and consumer
+##     configurations (after removing "kafka.")
+##
+#kafka.metadata.broker.list=localhost:9092
+#kafka.client.zookeeper=${config.zk.servers}
+
+###############################################################################
+##
+##     Secured Config
+##
+##     Some data stored in the config system is sensitive -- API keys and secrets,
+##     for example. to protect it, we use an encryption layer for this section
+##     of the config.
+##
+## The key is a base64 encode AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+#cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+#cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+
+cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+##     Kafka expects live connections from the consumer to the broker, which
+##     obviously doesn't work over connectionless HTTP requests. The Cambria
+##     server proxies HTTP requests into Kafka consumer sessions that are kept
+##     around for later re-use. Not doing so is costly for setup per request,
+##     which would substantially impact a high volume consumer's performance.
+##
+##     This complicates Cambria server failover, because we often need server
+##     A to close its connection before server B brings up the replacement.    
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+#cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+##     This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.baseUrl=localhost
+#metrics.send.cambria.topic=cambria.apinode.metrics
+#metrics.send.cambria.sendEverySeconds=60
+
diff --git a/etc/cambriaApi_template.properties b/etc/cambriaApi_template.properties
new file mode 100644 (file)
index 0000000..59ed815
--- /dev/null
@@ -0,0 +1,130 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+###############################################################################
+##
+## Cambria API Server config
+##
+##     - Default values are shown as commented settings.
+##
+
+
+###############################################################################
+##
+## HTTP service
+##
+##             - 3904 is standard as of 7/29/14.
+##             - At this time, Cambria always binds to 0.0.0.0
+##
+cambria.service.port=${CAMBRIA_SERVICE_PORT}
+
+###############################################################################
+##
+## Broker Type
+##
+##     The Cambria server can run either as a memory-only implementation, meant
+##     for testing, or against Kafka. For a memory-only server, use "memory" for
+##     the broker.type setting.
+##
+broker.type=${CAMBRIA_BROKER_TYPE}
+
+###############################################################################
+##
+## Zookeeper Connection
+##
+##     Both Cambria and Kafka make use of Zookeeper.
+#
+config.zk.servers=${CAMBRIA_ZOOKEEPER_NODES}
+config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+##     Items below are passed through to Kafka's producer and consumer
+##     configurations (after removing "kafka.")
+##
+kafka.metadata.broker.list=${KAFKA_BROKER_LIST}
+kafka.client.zookeeper=${CAMBRIA_ZOOKEEPER_NODES}
+
+###############################################################################
+##
+##     Secured Config
+##
+##     Some data stored in the config system is sensitive -- API keys and secrets,
+##     for example. to protect it, we use an encryption layer for this section
+##     of the config.
+##
+## The key is a base64 encode AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+###############################################################################
+##
+## Consumer Caching
+##
+##     Kafka expects live connections from the consumer to the broker, which
+##     obviously doesn't work over connectionless HTTP requests. The Cambria
+##     server proxies HTTP requests into Kafka consumer sessions that are kept
+##     around for later re-use. Not doing so is costly for setup per request,
+##     which would substantially impact a high volume consumer's performance.
+##
+##     This complicates Cambria server failover, because we often need server
+##     A to close its connection before server B brings up the replacement.    
+##
+
+## The consumer cache is normally enabled.
+cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+cambria.consumer.cache.sweepFreqSeconds=15
+cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+cambria.consumer.cache.zkConnect=${CAMBRIA_ZOOKEEPER_NODES}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+##     This server can report its metrics periodically on a topic.
+##
+metrics.send.cambria.enabled=true
+metrics.send.cambria.baseUrl=localhost:${CAMBRIA_SERVICE_PORT}
+metrics.send.cambria.topic=cambria.apinode.metrics
+metrics.send.cambria.sendEverySeconds=60
+
diff --git a/etc/log4j2.xml b/etc/log4j2.xml
new file mode 100644 (file)
index 0000000..0918f35
--- /dev/null
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ============LICENSE_START=======================================================
+  Copyright (C) 2021 Nordix Foundation.
+  ================================================================================
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+
+  SPDX-License-Identifier: Apache-2.0
+  ============LICENSE_END=========================================================
+  -->
+
+<Configuration status="WARN">
+    <Appenders>
+        <!-- Console Appender -->
+        <Console name="STDOUT" target="SYSTEM_OUT">
+            <PatternLayout pattern="[%d{ISO8601}{GMT+0} GMT][%-10t][%-5p]%m%n"/>
+        </Console>
+
+        <!-- Rolling File Appender -->
+        <RollingFile name="rollingFile">
+            <FileName>./logs/cambria.log</FileName>
+            <FilePattern>./logs/${date:yyyy-MM}/cambria-%d{yyyy-MM-dd}-%i.log</FilePattern>
+            <PatternLayout>
+                <Pattern>[%d{ISO8601}{GMT+0} GMT][%-10t][%-5p]%m%n</Pattern>
+            </PatternLayout>
+            <Policies>
+                <SizeBasedTriggeringPolicy size="128 MB"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+        </RollingFile>
+
+        <!-- Rolling File Appender -->
+        <RollingFile name="rollingFile_ECOMP_ERROR">
+            <FileName>./logs/error.log</FileName>
+            <FilePattern>./logs/${date:yyyy-MM}/error-%d{yyyy-MM-dd}-%i.log</FilePattern>
+            <PatternLayout>
+                <Pattern>%d{yyyy-MM-dd'T'HH:mm:ss}{GMT+0}+00:00|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%p|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%m%n</Pattern>
+            </PatternLayout>
+            <Policies>
+                <SizeBasedTriggeringPolicy size="128 MB"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+        </RollingFile>
+    </Appenders>
+    <Loggers>
+        <Root level="info">
+            <AppenderRef ref="STDOUT"/>
+            <AppenderRef ref="rollingFile"/>
+            <AppenderRef ref="rollingFile_ECOMP_ERROR"/>
+        </Root>
+    </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/etc/log4j2_template.xml b/etc/log4j2_template.xml
new file mode 100644 (file)
index 0000000..1c70479
--- /dev/null
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ============LICENSE_START=======================================================
+  Copyright (C) 2021 Nordix Foundation.
+  ================================================================================
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+
+  SPDX-License-Identifier: Apache-2.0
+  ============LICENSE_END=========================================================
+  -->
+
+<Configuration status="WARN">
+    <Appenders>
+        <!-- Console Appender -->
+        <Console name="STDOUT" target="SYSTEM_OUT">
+            <PatternLayout pattern="[%d{ISO8601}{GMT+0} GMT][%-10t][%-5p][%X{serverIp}]%m%n"/>
+        </Console>
+
+        <!-- Rolling File Appender -->
+        <RollingFile name="rollingFile">
+            <FileName>./${CAMBRIA_LOG_DIR}/cambria.log</FileName>
+            <FilePattern>./${CAMBRIA_LOG_DIR}/${date:yyyy-MM}/cambria-%d{yyyy-MM-dd}-%i.log</FilePattern>
+            <PatternLayout>
+                <Pattern>%d{yyyy-MM-dd'T'HH:mm:ss}{GMT+0}+00:00|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%p|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%m%n</Pattern>
+            </PatternLayout>
+            <Policies>
+                <SizeBasedTriggeringPolicy size="128 MB"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+        </RollingFile>
+
+        <!-- Rolling File Appender -->
+        <RollingFile name="rollingFile_ECOMP_ERROR">
+            <FileName>./${CAMBRIA_LOG_DIR}/error.log</FileName>
+            <FilePattern>./${CAMBRIA_LOG_DIR}/${date:yyyy-MM}/error-%d{yyyy-MM-dd}-%i.log</FilePattern>
+            <PatternLayout>
+                <Pattern>%d{yyyy-MM-dd'T'HH:mm:ss}{GMT+0}+00:00|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%p|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%m%n</Pattern>
+            </PatternLayout>
+            <Policies>
+                <SizeBasedTriggeringPolicy size="128 MB"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+        </RollingFile>
+    </Appenders>
+    <Loggers>
+        <Root level="${CAMBRIA_LOG_THRESHOLD}">
+            <AppenderRef ref="rollingFile"/>
+        </Root>
+    </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/etc/logstash_cambria_template.conf b/etc/logstash_cambria_template.conf
new file mode 100644 (file)
index 0000000..2ddb7d5
--- /dev/null
@@ -0,0 +1,36 @@
+input {
+        file {
+                path => "${CAMBRIA_SERVER_LOG}"
+                codec => multiline {
+                        pattern => "^\[%{YEAR}-%{MONTH}-%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND}\]"
+                        negate => "true"
+                        what => "previous"
+                }
+                sincedb_path => "/opt/app/logstash/conf_stage/.sincedb_cambria"
+                start_position => "beginning"
+                type => "cambria"
+        }
+}
+
+filter {
+       if [type] == "cambria" {
+               grok {
+                       match => ["message", "\[(?<date>%{YEAR}-%{MONTH}-%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND})\]\[%{DATA:logLevel}\]\[%{DATA:thread}\]\[%{DATA:class}\]\[%{DATA:id}\]%{GREEDYDATA:message}"]
+               }
+       
+               date {
+                       match => ["date", "YYYY-MMM-DD HH:mm:ss,SSS"]
+               }
+       }
+}
+
+output {
+       if [type] == "cambria" {
+            elasticsearch {
+                    cluster => "2020SA"
+                    host => "${ELASTICSEARCH_NODES}"
+                    index => "cambria-%{+YYYY.MM.dd}"
+            }
+    }
+}
+
diff --git a/pom.xml b/pom.xml
index 429f6c8..c95e686 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -1,27 +1,27 @@
-<!-- ============LICENSE_START======================================================= 
-       org.onap.dmaap ================================================================================ 
-       Copyright © 2017 AT&T Intellectual Property. All rights reserved. ================================================================================ 
-       Licensed under the Apache License, Version 2.0 (the "License"); you may not 
-       use this file except in compliance with the License. You may obtain a copy 
-       of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-       by applicable law or agreed to in writing, software distributed under the 
-       License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-       OF ANY KIND, either express or implied. See the License for the specific 
-       language governing permissions and limitations under the License. ============LICENSE_END========================================================= 
+<!-- ============LICENSE_START=======================================================
+       org.onap.dmaap ================================================================================
+       Copyright © 2017 AT&T Intellectual Property. All rights reserved. ================================================================================
+       Licensed under the Apache License, Version 2.0 (the "License"); you may not
+       use this file except in compliance with the License. You may obtain a copy
+       of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+       by applicable law or agreed to in writing, software distributed under the
+       License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+       OF ANY KIND, either express or implied. See the License for the specific
+       language governing permissions and limitations under the License. ============LICENSE_END=========================================================
        ECOMP is a trademark and service mark of AT&T Intellectual Property. -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-       xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+                xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
 
        <parent>
                <groupId>org.onap.oparent</groupId>
                <artifactId>oparent</artifactId>
-               <version>2.1.0</version>
+               <version>3.2.0</version>
        </parent>
 
        <groupId>org.onap.dmaap.messagerouter.messageservice</groupId>
        <artifactId>dmaapMR1</artifactId>
-       <version>1.2.20-SNAPSHOT</version>
+       <version>1.3.0-SNAPSHOT</version>
        <name>dmaap-messagerouter-messageservice</name>
        <description>Message Router - Restful interface built for kafka</description>
        <licenses>
                </developer>
        </developers>
 
+       <properties>
+               <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+               <dockerLocation>${basedir}/target/swm/package/nix/dist_files/</dockerLocation>
+               <docker.image.prefix>simpledemo</docker.image.prefix>
+               <!-- <javax-mail-version>1.5.0</javax-mail-version> -->
+               <module.ajsc.namespace.name>dmaap</module.ajsc.namespace.name>
+               <module.ajsc.namespace.version>v1</module.ajsc.namespace.version>
+               <ajscRuntimeVersion>3.0.11-oss</ajscRuntimeVersion>
+
+               <!-- This will be the Absolute Root of the Project and should contain NO
+                       Versioning -->
+               <absoluteDistFilesRoot>/appl/${project.artifactId}</absoluteDistFilesRoot>
+               <!-- <absoluteDistFilesRoot>/opt/app/dmaap/${project.artifactId}</absoluteDistFilesRoot> -->
+               <!-- For Versioning upon installation, add /${project.version} to distFilesRoot.
+                       For NO Versioning, leave as is -->
+               <!-- example: /appl/${project.artifactId}/${project.version}. Also, add
+                       ${project.version} to ${runAjscHome} for running locally. -->
+               <distFilesRoot>/appl/${project.artifactId}</distFilesRoot>
+               <!-- <distFilesRoot>/opt/app/dmaap/${project.artifactId}</distFilesRoot> -->
+               <runAjscHome>${basedir}/target/swm/package/nix/dist_files${distFilesRoot}</runAjscHome>
+
+               <!-- For SOA Cloud Installation <installOwnerUser>aft</installOwnerUser>
+                       <installOwnerGroup>aft</installOwnerGroup> <ownerManagementGroup>com.att.acsi.saat.dt.dmaap.dev</ownerManagementGroup> -->
+               <!-- For SOA Cloud Installation -->
+               <installOwnerUser>msgrtr</installOwnerUser>
+               <installOwnerGroup>dmaap</installOwnerGroup>
+               <ownerManagementGroup>com.att.acsi.saat.dt.dmaap.dev</ownerManagementGroup>
+               <!-- Port Selection. A value of 0 will allow for dynamic port selection.
+                       For local testing, you may choose to hardcode this value to something like
+                       8080 -->
+               <serverPort>3904</serverPort>
+               <sslport>3905</sslport>
+               <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
+               <testRouteOffer>workstation</testRouteOffer>
+               <testEnv>DEV</testEnv>
+               <!-- <dmaapImg>${project.version}</dmaapImg> -->
+               <timestamp>${maven.build.timestamp}</timestamp>
+               <maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
+               <camel.version>2.21.5</camel.version>
+               <camel.version.latest>3.5.0</camel.version.latest>
+               <sitePath>/content/sites/site/org/onap/dmaap/messagerouter/messageservice/${project.artifactId}/${project.version}</sitePath>
+               <skip.docker.build>true</skip.docker.build>
+               <skip.docker.tag>true</skip.docker.tag>
+               <skip.docker.push>true</skip.docker.push>
+               <nexusproxy>https://nexus.onap.org</nexusproxy>
+               <docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
+               <spring.version>3.2.18.RELEASE</spring.version>
+               <sonar.language>java</sonar.language>
+               <sonar.skip>false</sonar.skip>
+               <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>
+               <sonar.coverage.jacoco.xmlReportPaths>${project.reporting.outputDirectory}/jacoco-ut/jacoco.xml</sonar.coverage.jacoco.xmlReportPaths>
+               <sonar.projectVersion>${project.version}</sonar.projectVersion>
+               <log4j.version>2.13.3</log4j.version>
+               <maven.compiler.target>8</maven.compiler.target>
+               <maven.compiler.source>8</maven.compiler.source>
+       </properties>
+
        <build>
                <plugins>
-             <plugin>
-                    <groupId>org.jacoco</groupId>
-                    <artifactId>jacoco-maven-plugin</artifactId>
-                    <executions>
-                        <execution>
-                            <id>prepare-agent</id>
-                            <goals>
-                                <goal>prepare-agent</goal>
-                            </goals>
-                        </execution>
-                        <execution>
-                            <id>report</id>
-                            <goals>
-                                <goal>report</goal>
-                            </goals>
-                            <configuration>
-                                <dataFile>${project.build.directory}/code-coverage/jacoco.exec</dataFile>
-                                <outputDirectory>${project.reporting.outputDirectory}/jacoco-ut</outputDirectory>
-                            </configuration>
-                        </execution>
-                    </executions>
-                </plugin>
+                       <plugin>
+                       <artifactId>maven-checkstyle-plugin</artifactId>
+                       <executions>
+                               <execution>
+                                       <id>onap-java-style</id>
+                                       <configuration>
+                                               <consoleOutput>false</consoleOutput>
+                                       </configuration>
+                               </execution>
+                       </executions>
+               </plugin>
                        <plugin>
                                <groupId>org.apache.maven.plugins</groupId>
                                <artifactId>maven-site-plugin</artifactId>
                                        </dependency>
                                </dependencies>
                        </plugin>
-                
+
 
                        <plugin>
                                <groupId>com.blackducksoftware.integration</groupId>
                                        </execution>
                                </executions>
                        </plugin>
-
-                       <!-- <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> 
-                               <version>3.1</version> <configuration> <compilerId>groovy-eclipse-compiler</compilerId> 
-                               <verbose>true</verbose> <source>1.7</source> <target>1.7</target> </configuration> 
-                               <dependencies> <dependency> <groupId>org.codehaus.groovy</groupId> <artifactId>groovy-eclipse-compiler</artifactId> 
-                               <version>2.8.0-01</version> </dependency> <dependency> <groupId>org.codehaus.groovy</groupId> 
-                               <artifactId>groovy-eclipse-batch</artifactId> <version>2.1.5-03</version> 
-                               </dependency> </dependencies> </plugin> -->
                        <plugin>
                                <groupId>org.codehaus.groovy</groupId>
                                <artifactId>groovy-eclipse-compiler</artifactId>
                </plugins>
        </build>
 
-       <properties>
-               <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-               <dockerLocation>${basedir}/target/swm/package/nix/dist_files/</dockerLocation>
-               <docker.image.prefix>simpledemo</docker.image.prefix>
-               <!-- <javax-mail-version>1.5.0</javax-mail-version> -->
-               <module.ajsc.namespace.name>dmaap</module.ajsc.namespace.name>
-               <module.ajsc.namespace.version>v1</module.ajsc.namespace.version>
-               <ajscRuntimeVersion>3.0.11-oss</ajscRuntimeVersion>
-
-               <!-- This will be the Absolute Root of the Project and should contain NO 
-                       Versioning -->
-               <absoluteDistFilesRoot>/appl/${project.artifactId}</absoluteDistFilesRoot>
-               <!-- <absoluteDistFilesRoot>/opt/app/dmaap/${project.artifactId}</absoluteDistFilesRoot> -->
-               <!-- For Versioning upon installation, add /${project.version} to distFilesRoot. 
-                       For NO Versioning, leave as is -->
-               <!-- example: /appl/${project.artifactId}/${project.version}. Also, add 
-                       ${project.version} to ${runAjscHome} for running locally. -->
-               <distFilesRoot>/appl/${project.artifactId}</distFilesRoot>
-               <!-- <distFilesRoot>/opt/app/dmaap/${project.artifactId}</distFilesRoot> -->
-               <runAjscHome>${basedir}/target/swm/package/nix/dist_files${distFilesRoot}</runAjscHome>
-
-               <!-- For SOA Cloud Installation <installOwnerUser>aft</installOwnerUser> 
-                       <installOwnerGroup>aft</installOwnerGroup> <ownerManagementGroup>com.att.acsi.saat.dt.dmaap.dev</ownerManagementGroup> -->
-               <!-- For SOA Cloud Installation -->
-               <installOwnerUser>msgrtr</installOwnerUser>
-               <installOwnerGroup>dmaap</installOwnerGroup>
-               <ownerManagementGroup>com.att.acsi.saat.dt.dmaap.dev</ownerManagementGroup>
-               <!-- Port Selection. A value of 0 will allow for dynamic port selection. 
-                       For local testing, you may choose to hardcode this value to something like 
-                       8080 -->
-               <serverPort>3904</serverPort>
-               <sslport>3905</sslport>
-        <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
-               <testRouteOffer>workstation</testRouteOffer>
-               <testEnv>DEV</testEnv>
-               <!-- <dmaapImg>${project.version}</dmaapImg> -->
-               <timestamp>${maven.build.timestamp}</timestamp>
-        <maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
-               <camel.version>2.21.5</camel.version>
-               <camel.version.latest>3.5.0</camel.version.latest>
-               <sitePath>/content/sites/site/org/onap/dmaap/messagerouter/messageservice/${project.artifactId}/${project.version}</sitePath>
-               <skip.docker.build>true</skip.docker.build>
-               <skip.docker.tag>true</skip.docker.tag>
-               <skip.docker.push>true</skip.docker.push>
-               <nexusproxy>https://nexus.onap.org</nexusproxy>
-               <docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
-               <spring.version>3.2.18.RELEASE</spring.version>
-               <sonar.language>java</sonar.language>
-        <sonar.skip>false</sonar.skip>
-        <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>
-        <sonar.coverage.jacoco.xmlReportPaths>${project.reporting.outputDirectory}/jacoco-ut/jacoco.xml</sonar.coverage.jacoco.xmlReportPaths>
-        <sonar.projectVersion>${project.version}</sonar.projectVersion>
-       </properties>
-
        <!-- Distribution management -->
        <distributionManagement>
                <site>
 
        <!-- End Distribution management -->
 
-       <!-- The standard build tasks for this project are inherited from the parent. 
-               Please do not override the build tasks. However tasks and/or profiles can 
-               be included here as well as additional dependencies for your service. Any 
-               runtime or compile scope dependencies will be copied to the INSTALLATION_PATH/extJars 
-               folder and will be made available on the AJSC classpath for your service. 
-               Please, NOTE: DME2 and CSM related dependencies are EXTERNALIZED within the 
-               CSI environment. Therefore, they are provided within this project as "provided" 
-               dependencies. In order for the AJSC to run properly, locally, the CSM and 
-               DME2 dependencies will be copied into the target/commonLibs folder and will 
-               be made available to the classpath of the AJSC through the use of the system 
-               property, "AJSC_EXTERNAL_LIB_FOLDERS". This system property needs to be set 
-               in the "runAjsc" maven profile within the pom.xml (and, is defaulted to do 
-               so). If you have a startup failure related to a missing dme2 class not found 
-               exception, please contact the AJSC team for assistance. You can email support 
-               at ajsc-Support <DL-ajsc-Support@att.com>. For more information regarding 
+       <!-- The standard build tasks for this project are inherited from the parent.
+               Please do not override the build tasks. However tasks and/or profiles can
+               be included here as well as additional dependencies for your service. Any
+               runtime or compile scope dependencies will be copied to the INSTALLATION_PATH/extJars
+               folder and will be made available on the AJSC classpath for your service.
+               Please, NOTE: DME2 and CSM related dependencies are EXTERNALIZED within the
+               CSI environment. Therefore, they are provided within this project as "provided"
+               dependencies. In order for the AJSC to run properly, locally, the CSM and
+               DME2 dependencies will be copied into the target/commonLibs folder and will
+               be made available to the classpath of the AJSC through the use of the system
+               property, "AJSC_EXTERNAL_LIB_FOLDERS". This system property needs to be set
+               in the "runAjsc" maven profile within the pom.xml (and, is defaulted to do
+               so). If you have a startup failure related to a missing dme2 class not found
+               exception, please contact the AJSC team for assistance. You can email support
+               at ajsc-Support <DL-ajsc-Support@att.com>. For more information regarding
                the usage of the AJSC service pom.xml and management of dependencies, -->
        <dependencies>
 
-               <!-- cmn-CommonDataModel dependency added to resolve build issue not finding 
+               <!-- cmn-CommonDataModel dependency added to resolve build issue not finding
                        version 100.0.64 -->
 
-               <!-- <dependency> <groupId>csi-schemas-source</groupId> <artifactId>cmn-CommonDataModel</artifactId> 
+               <!-- <dependency> <groupId>csi-schemas-source</groupId> <artifactId>cmn-CommonDataModel</artifactId>
                        <version>112.0.50</version> </dependency> -->
                <!-- TODO: add open source version here -->
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-starter-actuator</artifactId>
+                       <version>2.4.0</version>
+               </dependency>
+               <!-- Micormeter core dependecy  -->
+               <dependency>
+                       <groupId>io.micrometer</groupId>
+                       <artifactId>micrometer-core</artifactId>
+                       <version>1.7.1</version>
+               </dependency>
+               <!-- Micrometer Prometheus registry  -->
+               <dependency>
+                       <groupId>io.micrometer</groupId>
+                       <artifactId>micrometer-registry-prometheus</artifactId>
+                       <version>1.6.1</version>
+               </dependency>
+               <dependency>
+                       <groupId>com.sun.mail</groupId>
+                       <artifactId>javax.mail</artifactId>
+                       <version>1.6.0</version>
+                       <exclusions>
+                               <!-- javax activation is part of the JDK now -->
+                               <exclusion>
+                                       <groupId>javax.activation</groupId>
+                                       <artifactId>activation</artifactId>
+                               </exclusion>
+                       </exclusions>
+               </dependency>
+               <dependency>
+                       <groupId>com.sun.xml.bind</groupId>
+                       <artifactId>jaxb-core</artifactId>
+                       <version>2.3.0.1</version>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.sun.xml.bind</groupId>
+                       <artifactId>jaxb-impl</artifactId>
+                       <version>2.3.0.1</version>
+                       <scope>compile</scope>
+                       <exclusions>
+                               <exclusion>
+                                       <artifactId>jsr181-api</artifactId>
+                                       <groupId>javax.jws</groupId>
+                               </exclusion>
+                       </exclusions>
+               </dependency>
 
-
-       <dependency>
-               <groupId>com.sun.mail</groupId>
-               <artifactId>javax.mail</artifactId>
-               <version>1.6.0</version>
-               <exclusions>
-                       <!-- javax activation is part of the JDK now -->
-                       <exclusion>
-                               <groupId>javax.activation</groupId>
-                               <artifactId>activation</artifactId>
-                       </exclusion>
-               </exclusions>
-       </dependency>
-       <dependency>
-               <groupId>com.sun.xml.bind</groupId>
-               <artifactId>jaxb-core</artifactId>
-               <version>2.3.0.1</version>
-               <scope>compile</scope>
-       </dependency>
-       <dependency>
-               <groupId>com.sun.xml.bind</groupId>
-               <artifactId>jaxb-impl</artifactId>
-               <version>2.3.0.1</version>
-               <scope>compile</scope>
-               <exclusions>
-                       <exclusion>
-                               <artifactId>jsr181-api</artifactId>
-                               <groupId>javax.jws</groupId>
-                       </exclusion>
-               </exclusions>
-       </dependency>
-        
-                <dependency>
-            <groupId>org.apache.cxf</groupId>
-            <artifactId>cxf-rt-transports-http</artifactId>
-            <version>3.2.5</version>
-           </dependency>
-
-               <!-- <dependency>
-                       <groupId>com.att.cadi</groupId>
-                       <artifactId>cadi-core</artifactId>
-                       <version>1.3.0</version>
+               <dependency>
+                       <groupId>org.apache.cxf</groupId>
+                       <artifactId>cxf-rt-transports-http</artifactId>
+                       <version>3.2.5</version>
                </dependency>
                <dependency>
                        <groupId>com.att.aft</groupId>
                        <artifactId>dme2</artifactId>
                        <version>3.1.200-oss</version>
-               </dependency> -->
+                       <exclusions>
+                               <exclusion>
+                                       <groupId>javax.jms</groupId>
+                                       <artifactId>jms</artifactId>
+                               </exclusion>
+                       </exclusions>
+               </dependency>
+               <dependency>
+                       <groupId>org.springframework</groupId>
+                       <artifactId>spring-test</artifactId>
+                       <version>3.2.12.RELEASE</version>
+                       <scope>test</scope>
+               </dependency>
                <dependency>
                        <groupId>org.springframework</groupId>
                        <artifactId>spring-expression</artifactId>
                        <artifactId>spring-messaging</artifactId>
                        <version>4.1.9.RELEASE</version>
                </dependency>
-               <!-- <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> 
-                       <version>${javax-mail-version}</version> <exclusions> javax activation is 
-                       part of the JDK now <exclusion> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> 
-                       </exclusion> </exclusions> </dependency> -->
-
-       <dependency>
-       <groupId>org.apache.zookeeper</groupId>
-       <artifactId>zookeeper</artifactId>
-       <version>3.4.14</version>
-   </dependency>
-   <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
-      <exclusions>
-        <exclusion>
-          <groupId>*</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-       <dependency>
-               <groupId>org.apache.kafka</groupId>
-               <artifactId>kafka_2.11</artifactId>
-               <version>2.3.0</version>
-       </dependency>
-       <dependency>
-               <groupId>commons-codec</groupId>
-               <artifactId>commons-codec</artifactId>
-               <version>1.14</version>
-       </dependency>
-       <dependency>
-               <groupId>commons-io</groupId>
-               <artifactId>commons-io</artifactId>
-               <version>2.7</version>
-       </dependency>
-       <dependency>
-               <groupId>org.quartz-scheduler</groupId>
-               <artifactId>quartz</artifactId>
-               <version>2.3.2</version>
-       </dependency>
-       <dependency>
-               <groupId>com.fasterxml.woodstox</groupId>
-               <artifactId>woodstox-core</artifactId>
-               <version>5.3.0</version>
-       </dependency>
+               <dependency>
+                       <groupId>org.apache.zookeeper</groupId>
+                       <artifactId>zookeeper</artifactId>
+                       <version>3.4.14</version>
+               </dependency>
+               <dependency>
+                       <groupId>org.slf4j</groupId>
+                       <artifactId>slf4j-api</artifactId>
+                       <version>1.6.1</version>
+               </dependency>
+               <!-- Log4j2 logger -->
+               <dependency>
+                       <groupId>org.apache.logging.log4j</groupId>
+                       <artifactId>log4j-api</artifactId>
+                       <version>${log4j.version}</version>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.logging.log4j</groupId>
+                       <artifactId>log4j-core</artifactId>
+                       <version>${log4j.version}</version>
+               </dependency>
+               <dependency>
+                       <groupId>org.slf4j</groupId>
+                       <artifactId>slf4j-log4j12</artifactId>
+                       <version>1.6.1</version>
+                       <exclusions>
+                               <exclusion>
+                                       <groupId>*</groupId>
+                                       <artifactId>*</artifactId>
+                               </exclusion>
+                       </exclusions>
+               </dependency>
+               <dependency>
+                       <groupId>org.hamcrest</groupId>
+                       <artifactId>hamcrest-library</artifactId>
+                       <version>1.3</version>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.kafka</groupId>
+                       <artifactId>kafka_2.11</artifactId>
+                       <version>0.11.0.3</version>
+               </dependency>
+               <dependency>
+                       <groupId>commons-codec</groupId>
+                       <artifactId>commons-codec</artifactId>
+                       <version>1.14</version>
+               </dependency>
+               <dependency>
+                       <groupId>jline</groupId>
+                       <artifactId>jline</artifactId>
+                       <version>2.12.1</version>
+               </dependency>
+               <dependency>
+                       <groupId>commons-io</groupId>
+                       <artifactId>commons-io</artifactId>
+                       <version>2.7</version>
+               </dependency>
+               <dependency>
+                       <groupId>org.quartz-scheduler</groupId>
+                       <artifactId>quartz</artifactId>
+                       <version>2.3.2</version>
+               </dependency>
+               <dependency>
+                       <groupId>com.fasterxml.woodstox</groupId>
+                       <artifactId>woodstox-core</artifactId>
+                       <version>5.3.0</version>
+               </dependency>
                <dependency>
                        <groupId>org.grails</groupId>
                        <artifactId>grails-bootstrap</artifactId>
                </dependency>
                <dependency>
                        <groupId>org.springframework</groupId>
-                       <artifactId>spring-webmvc</artifactId>
+                       <artifactId>spring-context</artifactId>
                        <version>4.3.18.RELEASE</version>
                </dependency>
                <dependency>
                        <groupId>org.springframework</groupId>
-                       <artifactId>spring-core</artifactId>
+                       <artifactId>spring-webmvc</artifactId>
                        <version>4.3.18.RELEASE</version>
                </dependency>
                <dependency>
                        <groupId>org.springframework</groupId>
-                       <artifactId>spring-beans</artifactId>
+                       <artifactId>spring-core</artifactId>
                        <version>4.3.18.RELEASE</version>
                </dependency>
-
                <dependency>
                        <groupId>org.springframework</groupId>
-                       <artifactId>spring-context</artifactId>
+                       <artifactId>spring-beans</artifactId>
                        <version>4.3.18.RELEASE</version>
                </dependency>
+
+
                <dependency>
                        <groupId>org.apache.commons</groupId>
                        <artifactId>commons-io</artifactId>
                        <version>1.3.2</version>
                </dependency>
                <dependency>
-                       <groupId>org.onap.dmaap.messagerouter.msgrtr</groupId>
-                       <artifactId>msgrtr</artifactId>
-                       <version>1.1.23</version>
+                       <groupId>com.att.eelf</groupId>
+                       <artifactId>eelf-core</artifactId>
+                       <version>1.0.0</version>
                        <exclusions>
                                <exclusion>
-                                       <groupId>org.apache.kafka</groupId>
-                               <artifactId>kafka_2.11</artifactId>
+                                       <groupId>org.mockito</groupId>
+                                       <artifactId>mockito-core</artifactId>
                                </exclusion>
                                <exclusion>
-                                       <groupId>org.slf4j</groupId>
-                                       <artifactId>slf4j-log4j12</artifactId>
+                                       <groupId>org.powermock</groupId>
+                                       <artifactId>powermock-module-junit4</artifactId>
                                </exclusion>
                                <exclusion>
-                                       <groupId>com.att.ajsc</groupId>
-                                       <artifactId>ajsc-core</artifactId>
+                                       <groupId>org.powermock</groupId>
+                                       <artifactId>powermock-api-mockito</artifactId>
                                </exclusion>
+                       </exclusions>
+               </dependency>
+
+               <!-- our NSA server library -->
+               <dependency>
+                       <groupId>com.att.nsa</groupId>
+                       <artifactId>nsaServerLibrary</artifactId>
+                       <version>1.0.10</version>
+                       <exclusions>
                                <exclusion>
                                        <groupId>org.apache.tomcat</groupId>
                                        <artifactId>tomcat-catalina</artifactId>
                                </exclusion>
-
                                <exclusion>
                                        <groupId>org.apache.tomcat.embed</groupId>
                                        <artifactId>tomcat-embed-core</artifactId>
                                        <groupId>org.apache.tomcat</groupId>
                                        <artifactId>tomcat-util</artifactId>
                                </exclusion>
-                               <!-- <exclusion> <groupId>com.att.nsa</groupId> <artifactId>saToolkit</artifactId> 
-                                       </exclusion> -->
-                               <exclusion>
-                                       <groupId>com.att.nsa</groupId>
-                                       <artifactId>tomcat-util</artifactId>
-                               </exclusion>
-
-                               <exclusion>
-                                       <groupId>commons-beanutils</groupId>
-                                       <artifactId>commons-beanutils</artifactId>
-                               </exclusion>
                                <exclusion>
                                        <groupId>javax.mail</groupId>
                                        <artifactId>mail</artifactId>
                                </exclusion>
                        </exclusions>
                </dependency>
+               <dependency>
+                       <groupId>com.att.nsa</groupId>
+                       <artifactId>saToolkit</artifactId>
+                       <version>0.0.1</version>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.curator</groupId>
+                       <artifactId>curator-recipes</artifactId>
+                       <version>4.0.1</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.apache.curator</groupId>
+                       <artifactId>curator-test</artifactId>
+                       <version>4.0.1</version>
+               </dependency>
+               <dependency>
+                       <groupId>javax.inject</groupId>
+                       <artifactId>javax.inject</artifactId>
+                       <version>1</version>
+               </dependency>
                <dependency>
                        <groupId>org.mockito</groupId>
                        <artifactId>mockito-core</artifactId>
-                       <version>1.10.19</version>
+                       <version>3.9.0</version>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.powermock</groupId>
+                       <artifactId>powermock-core</artifactId>
+                       <version>2.0.7</version>
                        <scope>test</scope>
                </dependency>
                <dependency>
                        <groupId>org.powermock</groupId>
                        <artifactId>powermock-module-junit4</artifactId>
-                       <version>1.6.4</version>
+                       <version>2.0.7</version>
                        <scope>test</scope>
                </dependency>
                <dependency>
                        <groupId>org.powermock</groupId>
-                       <artifactId>powermock-api-mockito</artifactId>
-                       <version>1.6.4</version>
+                       <artifactId>powermock-module-junit4-rule</artifactId>
+                       <version>2.0.7</version>
                        <scope>test</scope>
                </dependency>
-               <!-- <dependency> <groupId>org.codehaus.jackson</groupId> <artifactId>jackson-jaxrs</artifactId> 
-                       <version>1.9.13</version> </dependency> <dependency> <groupId>org.codehaus.jackson</groupId> 
-                       <artifactId>jackson-mapper-asl</artifactId> <version>1.9.13</version> </dependency> 
-                       <dependency> <groupId>org.codehaus.jackson</groupId> <artifactId>jackson-core-asl</artifactId> 
-                       <version>1.9.13</version> </dependency> -->
-
+               <dependency>
+                       <groupId>org.powermock</groupId>
+                       <artifactId>powermock-api-support</artifactId>
+                       <version>2.0.7</version>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.powermock</groupId>
+                       <artifactId>powermock-api-mockito2</artifactId>
+                       <version>2.0.7</version>
+               </dependency>
                <dependency>
                        <groupId>com.fasterxml.jackson.jaxrs</groupId>
                        <artifactId>jackson-jaxrs-json-provider</artifactId>
                        <groupId>org.apache.cxf</groupId>
                        <artifactId>cxf-rt-rs-extension-providers</artifactId>
                        <version>3.2.2</version>
-                       <!-- <exclusions> <exclusion> <groupId>org.apache.cxf</groupId> <artifactId>cxf-rt-transports-http</artifactId> 
-                               </exclusion> </exclusions> -->
                        <exclusions>
                                <exclusion>
                                        <groupId>org.apache.cxf</groupId>
                                </exclusion>
                        </exclusions>
                </dependency>
-               <!-- <dependency> <groupId>org.apache.cxf</groupId> <artifactId>cxf-rt-transports-http</artifactId> 
-                       <version>3.1.14</version> </dependency> -->
                <dependency>
                        <groupId>org.codehaus.jettison</groupId>
                        <artifactId>jettison</artifactId>
                        <version>1.3.7</version>
                </dependency>
-
-               <!-- <dependency> <groupId>dom4j</groupId> <artifactId>dom4j</artifactId> 
-                       <version>1.6.1</version> <scope>provided</scope> </dependency> -->
                <dependency>
                        <groupId>com.att.ajsc</groupId>
                        <artifactId>ajsc-archetype-parent</artifactId>
                        <type>pom</type>
                </dependency>
 
-       <dependency>
-               <groupId>org.onap.aaf.authz</groupId>
-               <artifactId>aaf-cadi-aaf</artifactId>
-               <version>2.7.4</version>
-       </dependency>
-       <dependency>
-               <groupId>backport-util-concurrent</groupId>
-               <artifactId>backport-util-concurrent</artifactId>
-               <version>3.1</version>
-       </dependency>
+               <dependency>
+                       <groupId>org.onap.aaf.authz</groupId>
+                       <artifactId>aaf-cadi-aaf</artifactId>
+                       <version>2.7.4</version>
+               </dependency>
+               <dependency>
+                       <groupId>backport-util-concurrent</groupId>
+                       <artifactId>backport-util-concurrent</artifactId>
+                       <version>3.1</version>
+               </dependency>
                <dependency>
                        <groupId>org.apache.camel</groupId>
                        <artifactId>camel-core</artifactId>
                        <groupId>org.apache.camel</groupId>
                        <artifactId>camel-servlet</artifactId>
                        <version>${camel.version}</version>
-                       <!-- <exclusions> <exclusion> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> 
-                               </exclusion> </exclusions> -->
                </dependency>
-               <!-- <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> 
-                       <version>4.0</version> </dependency> -->
-
+               <dependency>
+                       <groupId>org.apache.httpcomponents</groupId>
+                       <artifactId>httpcore</artifactId>
+                       <version>4.4.1</version>
+               </dependency>
                <dependency>
                        <groupId>org.apache.camel</groupId>
                        <artifactId>camel-http4</artifactId>
                        <version>3.0.11-oss</version>
                        <scope>provided</scope>
                </dependency>
-               <dependency>
-                       <groupId>org.springframework</groupId>
-                       <artifactId>spring-test</artifactId>
-                       <version>3.2.12.RELEASE</version>
-                       <scope>test</scope>
-               </dependency>
+
        </dependencies>
-       <!-- <build> <resources> <resource> <directory>${basedir}/ajsc-shared-config/etc</directory> 
-               </resource> </resources> </build> -->
        <profiles>
-               <!-- Use this profile to run the AJSC locally. This profile can be successfully 
-                       shutdown WITHIN eclipse even in a Windows environment. Debugging is also 
+               <!-- Use this profile to run the AJSC locally. This profile can be successfully
+                       shutdown WITHIN eclipse even in a Windows environment. Debugging is also
                        available with this profile. -->
                <profile>
                        <id>docker</id>
                                <skip.docker.push>false</skip.docker.push>
                        </properties>
                        <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.codehaus.groovy.maven</groupId>
-                        <artifactId>gmaven-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <phase>validate</phase>
-                                <goals>
-                                    <goal>execute</goal>
-                                </goals>
-                                <configuration>
-                                    <properties>
-                                        <ver>${project.version}</ver>
-                                        <timestamp>${maven.build.timestamp}</timestamp>
-                                    </properties>
-                                    <source>
-                                        println project.properties['ver'];
-                                        if ( project.properties['ver'].endsWith("-SNAPSHOT") ) {
-                                        project.properties['dockertag1']=project.properties['ver'] + "-latest";
-                                        project.properties['dockertag2']=project.properties['ver'] + "-" + project.properties['timestamp'];
-                                        } else {
-                                        project.properties['dockertag1']=project.properties['ver'] + "-STAGING-latest";
-                                        project.properties['dockertag2']=project.properties['ver'] + "-STAGING-" + project.properties['timestamp'];
-                                        }
-                                        println 'docker tag 1: ' + project.properties['dockertag1'];
-                                        println 'docker tag 2: ' + project.properties['dockertag2'];
-                                    </source>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                   <plugin>
-                        <groupId>io.fabric8</groupId>
-                        <artifactId>docker-maven-plugin</artifactId>
-                        <version>0.28.0</version>  
-                        <configuration>
-                            <verbose>${docker.verbose}</verbose>
-                            <apiVersion>${docker.apiVersion}</apiVersion>
-                            <pullRegistry>${docker.pull.registry}</pullRegistry>
-                            <pushRegistry>${docker.push.registry}</pushRegistry>
-                            <images>
-                                
-                            </images>
-                        </configuration>
-                         <executions>
-                             <execution>
-                                 <id>generate-images</id>
-                                 <phase>install</phase>
-                                 <goals>
-                                     <goal>build</goal>
-                                 </goals>
-                             </execution>
-                             <execution>
-                                 <id>push-images</id>
-                                 <phase>deploy</phase>
-                                 <goals>
-                                     <goal>push</goal>
-                                 </goals>
-                             </execution>
-                         </executions>
-                    </plugin>
-                </plugins>
-            </build>
+                               <plugins>
+                                       <plugin>
+                                               <groupId>org.codehaus.groovy.maven</groupId>
+                                               <artifactId>gmaven-plugin</artifactId>
+                                               <executions>
+                                                       <execution>
+                                                               <phase>validate</phase>
+                                                               <goals>
+                                                                       <goal>execute</goal>
+                                                               </goals>
+                                                               <configuration>
+                                                                       <properties>
+                                                                               <ver>${project.version}</ver>
+                                                                               <timestamp>${maven.build.timestamp}</timestamp>
+                                                                       </properties>
+                                                                       <source>
+                                                                               println project.properties['ver'];
+                                                                               if ( project.properties['ver'].endsWith("-SNAPSHOT") ) {
+                                                                                       project.properties['dockertag1']=project.properties['ver'] + "-latest";
+                                                                                       project.properties['dockertag2']=project.properties['ver'] + "-" + project.properties['timestamp'];
+                                                                               } else {
+                                                                                       project.properties['dockertag1']=project.properties['ver'] + "-STAGING-latest";
+                                                                                       project.properties['dockertag2']=project.properties['ver'] + "-STAGING-" + project.properties['timestamp'];
+                                                                               }
+                                                                               println 'docker tag 1: ' + project.properties['dockertag1'];
+                                                                               println 'docker tag 2: ' + project.properties['dockertag2'];
+                                                                       </source>
+                                                               </configuration>
+                                                       </execution>
+                                               </executions>
+                                       </plugin>
+                                       <plugin>
+                                               <groupId>io.fabric8</groupId>
+                                               <artifactId>docker-maven-plugin</artifactId>
+                                               <version>0.28.0</version>
+                                               <configuration>
+                                                       <verbose>${docker.verbose}</verbose>
+                                                       <apiVersion>${docker.apiVersion}</apiVersion>
+                                                       <pullRegistry>${docker.pull.registry}</pullRegistry>
+                                                       <pushRegistry>${docker.push.registry}</pushRegistry>
+                                                       <images>
+                                                               
+                                                       </images>
+                                               </configuration>
+                                               <executions>
+                                                       <execution>
+                                                               <id>generate-images</id>
+                                                               <phase>install</phase>
+                                                               <goals>
+                                                                       <goal>build</goal>
+                                                               </goals>
+                                                       </execution>
+                                                       <execution>
+                                                               <id>push-images</id>
+                                                               <phase>deploy</phase>
+                                                               <goals>
+                                                                       <goal>push</goal>
+                                                               </goals>
+                                                       </execution>
+                                               </executions>
+                                       </plugin>
+                               </plugins>
+                       </build>
                </profile>
 
                <profile>
                                                                        <goal>java</goal>
                                                                </goals>
                                                                <configuration>
-                                                                       <!-- In order to better mimic a SOA cloud installation of AJSC (and 
-                                                                               to help eliminate Maven/Eclipse/AJSC classpath issues that may be difficult 
-                                                                               to diagnose), within this profile used to run locally, we are NOT including 
-                                                                               project dependencies. These will be loaded by AJSC from $AJSC_HOME/extJars. 
-                                                                               The only jar needed to run AJSC is the ajsc-runner.jar, and therefore is 
+                                                                       <!-- In order to better mimic a SOA cloud installation of AJSC (and
+                                                                               to help eliminate Maven/Eclipse/AJSC classpath issues that may be difficult
+                                                                               to diagnose), within this profile used to run locally, we are NOT including
+                                                                               project dependencies. These will be loaded by AJSC from $AJSC_HOME/extJars.
+                                                                               The only jar needed to run AJSC is the ajsc-runner.jar, and therefore is
                                                                                the only dependency required by this profile to run locally. -->
                                                                        <includeProjectDependencies>false</includeProjectDependencies>
                                                                        <includePluginDependencies>true</includePluginDependencies>
                                                                                        <value>${basedir}/ajsc-shared-config</value>
                                                                                </systemProperty>
 
-                                                                               <!-- Please, NOTE: The following 2 system properties will normally 
-                                                                                       be set within the sys-props.properties file once deployed to a node. We are 
-                                                                                       setting them HERE to run locally to make more efficient use of maven variable 
+                                                                               <!-- Please, NOTE: The following 2 system properties will normally
+                                                                                       be set within the sys-props.properties file once deployed to a node. We are
+                                                                                       setting them HERE to run locally to make more efficient use of maven variable
                                                                                        replacement for ${basedir} -->
-                                                                               <!-- AJSC_EXTERNAL_LIB_FOLDERS represents the particular jars that 
+                                                                               <!-- AJSC_EXTERNAL_LIB_FOLDERS represents the particular jars that
                                                                                        will be externalized on a CSI node. This includes dme2 and csm related artifact. -->
                                                                                <sysproperty>
                                                                                        <key>AJSC_EXTERNAL_LIB_FOLDERS</key>
                                                                                        <value>${basedir}/target/commonLibs</value>
                                                                                </sysproperty>
-                                                                               <!-- AJSC_EXTERNAL_PROPERTIES_FOLDERS represents the particular 
-                                                                                       files that may need to be added to the classpath. These files will be externalized 
-                                                                                       on a CSI node. This includes dme2 and csm related artifact (such as csm-config-app.properties). 
-                                                                                       Failure to have these files on the classpath may result in errors thrown 
+                                                                               <!-- AJSC_EXTERNAL_PROPERTIES_FOLDERS represents the particular
+                                                                                       files that may need to be added to the classpath. These files will be externalized
+                                                                                       on a CSI node. This includes dme2 and csm related artifact (such as csm-config-app.properties).
+                                                                                       Failure to have these files on the classpath may result in errors thrown
                                                                                        by csm framework. -->
                                                                                <sysproperty>
                                                                                        <key>AJSC_EXTERNAL_PROPERTIES_FOLDERS</key>
                                                                                </systemProperty>
                                                                        </systemProperties>
 
-                                                                       <!-- Command Line Arguments to add to the java command. Here, you 
-                                                                               can specify the port as well as the Context you want your service to run 
-                                                                               in. Use context=/ to run in an unnamed Context (Root Context). The default 
-                                                                               configuration of the AJSC is to run under the / Context. Setting the port 
-                                                                               here can aid during the development phase of your service. However, you can 
-                                                                               leave this argument out entirely, and the AJSC will default to using an Ephemeral 
+                                                                       <!-- Command Line Arguments to add to the java command. Here, you
+                                                                               can specify the port as well as the Context you want your service to run
+                                                                               in. Use context=/ to run in an unnamed Context (Root Context). The default
+                                                                               configuration of the AJSC is to run under the / Context. Setting the port
+                                                                               here can aid during the development phase of your service. However, you can
+                                                                               leave this argument out entirely, and the AJSC will default to using an Ephemeral
                                                                                port. -->
                                                                        <arguments>
                                                                                <argument>context=/</argument>
                                                                                <delete dir="target/versioned-runtime" includes="**/*" />
                                                                                <delete dir="target/CDP" includes="**/*" />
 
-                                                                               <!-- This is where replacer plugin replaces tokens (Example: __module_ajsc_namespace_name__ 
+                                                                               <!-- This is where replacer plugin replaces tokens (Example: __module_ajsc_namespace_name__
                                                                                        is replaced by module.ajsc.namespace.name) -->
                                                                                <copy todir="target/versioned-ajsc">
                                                                                        <fileset dir="src/main/ajsc" includes="**/*" />
                                                                                </copy>
 
-                                                                               <!-- Copying the CDP, bundleconfig, and StaticContent to the target 
+                                                                               <!-- Copying the CDP, bundleconfig, and StaticContent to the target
                                                                                        directory for future use by replacer plugin -->
                                                                                <copy todir="target/CDP" failonerror="false">
                                                                                        <fileset dir="CDP" includes="**/*.sh" />
                                                                                </copy>
                                                                                <copy flatten="true" file="CDP/SampleBlueprint.xml"
-                                                                                       failonerror="false"
-                                                                                       tofile="target/CDP/${module.ajsc.namespace.name}Blueprint.xml" />
+                                                                                         failonerror="false"
+                                                                                         tofile="target/CDP/${module.ajsc.namespace.name}Blueprint.xml" />
                                                                                <copy todir="target/staticContent" failonerror="false">
                                                                                        <fileset dir="staticContent" includes="**/*" />
                                                                                </copy>
                                                                                        <fileset dir="src/main/runtime" includes="**/*" />
                                                                                </copy>
                                                                                <delete
-                                                                                       file="target/versioned-runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context" />
+                                                                                               file="target/versioned-runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context" />
                                                                                <copy flatten="true"
-                                                                                       file="src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context"
-                                                                                       tofile="target/versioned-runtime/context/${module.ajsc.namespace.name}#${module.ajsc.namespace.version}.context" />
+                                                                                         file="src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context"
+                                                                                         tofile="target/versioned-runtime/context/${module.ajsc.namespace.name}#${module.ajsc.namespace.version}.context" />
                                                                                <delete
-                                                                                       file="target/versioned-runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json" />
+                                                                                               file="target/versioned-runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json" />
                                                                                <copy flatten="true"
-                                                                                       file="src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json"
-                                                                                       tofile="target/versioned-runtime/deploymentPackage/${module.ajsc.namespace.name}#${module.ajsc.namespace.version}.json" />
+                                                                                         file="src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json"
+                                                                                         tofile="target/versioned-runtime/deploymentPackage/${module.ajsc.namespace.name}#${module.ajsc.namespace.version}.json" />
                                                                                <delete
-                                                                                       file="target/versioned-runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json" />
+                                                                                               file="target/versioned-runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json" />
                                                                                <copy flatten="true"
-                                                                                       file="src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json"
-                                                                                       tofile="target/versioned-runtime/shiroRole/contextadmin#${module.ajsc.namespace.name}.json" />
+                                                                                         file="src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json"
+                                                                                         tofile="target/versioned-runtime/shiroRole/contextadmin#${module.ajsc.namespace.name}.json" />
                                                                                <delete
-                                                                                       file="target/versioned-runtime/shiroUserRole/ajsc#contextAdmin#__module.ajsc.namespace.name__.json" />
+                                                                                               file="target/versioned-runtime/shiroUserRole/ajsc#contextAdmin#__module.ajsc.namespace.name__.json" />
                                                                                <copy flatten="true"
-                                                                                       file="src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json"
-                                                                                       tofile="target/versioned-runtime/shiroUserRole/ajsc#contextadmin#${module.ajsc.namespace.name}.json" />
+                                                                                         file="src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json"
+                                                                                         tofile="target/versioned-runtime/shiroUserRole/ajsc#contextadmin#${module.ajsc.namespace.name}.json" />
                                                                                <echo message="EXITING 'copy_runtime_template' ant tasks" />
                                                                        </target>
                                                                </configuration>
                                                                <configuration>
                                                                        <tasks>
                                                                                <fixcrlf
-                                                                                       srcdir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}"
-                                                                                       includes="**/*.sh,**/*.xml,**/*.properties,**/*.xsd" />
+                                                                                               srcdir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}"
+                                                                                               includes="**/*.sh,**/*.xml,**/*.properties,**/*.xsd" />
                                                                        </tasks>
                                                                </configuration>
                                                                <goals>
                                                                <configuration>
                                                                        <includeScope>provided</includeScope>
                                                                        <includeGroupIds>net.cingular.enterprise,com.att.aft,dom4j</includeGroupIds>
-                                                                       <!-- <includeGroupIds>com.att.aft</includeGroupIds> -->
                                                                        <outputDirectory>${project.build.directory}/commonLibs</outputDirectory>
                                                                        <silent>true</silent>
                                                                </configuration>
index 4b9324e..1c21383 100644 (file)
@@ -22,6 +22,8 @@
  package org.onap.dmaap;
 
 
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import javax.inject.Singleton;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
@@ -29,16 +31,10 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
-
-import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Autowired;
-
 import org.onap.dmaap.dmf.mr.CambriaApiException;
 import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
-import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
 import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.springframework.beans.factory.annotation.Autowired;
 
 /**
  * Exception Mapper class to handle
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java b/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java
new file mode 100644 (file)
index 0000000..c30c344
--- /dev/null
@@ -0,0 +1,79 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr;
+
+import com.att.nsa.apiServer.NsaAppException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+
+public class CambriaApiException extends NsaAppException
+{
+       /*
+        * defined long type constant serialVersionUID
+        */
+       private static final long serialVersionUID = 1L;
+       
+       private transient ErrorResponse errRes;
+       /**
+        * Implements constructor CambriaApiException
+        * @param jsonObject
+        * 
+        */
+       public CambriaApiException ( JSONObject jsonObject )
+       {
+               super ( jsonObject );
+       }
+
+       /**
+        * Implements constructor CambriaApiException
+        * @param status
+        * @param msg
+        */
+       public CambriaApiException ( int status, String msg )
+       {
+               super ( status, msg );
+       }
+
+       /**
+        * Implements constructor CambriaApiException
+        * @param status
+        * @param jsonObject
+        */
+       public CambriaApiException ( int status, JSONObject jsonObject )
+       {
+               super ( status, jsonObject );
+       }
+       
+       public CambriaApiException (ErrorResponse errRes)
+       {
+               super(errRes.getHttpStatusCode(),errRes.getErrorMessage());
+               this.errRes = errRes;
+       }
+       
+       public ErrorResponse getErrRes() {
+               return errRes;
+       }
+
+       public void setErrRes(ErrorResponse errRes) {
+               this.errRes = errRes;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java b/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java
new file mode 100644 (file)
index 0000000..7a1d9d7
--- /dev/null
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+/**
+ * CambriaApiVersionInfo will provide the version of cambria code
+ * 
+ * @author peter
+ *
+ */
+public class CambriaApiVersionInfo {
+    
+       /**
+        * 3 constants are defined:-
+        * PROPS,VERSION and LOG
+        */
+       
+       private static final Properties PROPS = new Properties();
+    private static final String VERSION;
+
+
+    private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class);
+    
+    /**
+     * private constructor created with no argument
+     * to avoid default constructor
+     */
+    private CambriaApiVersionInfo()
+    {
+       
+    }
+    
+    /**
+     * returns version of String type
+     */
+    public static String getVersion() {
+        return VERSION;
+    }
+
+    /** 
+     * 
+     * defines static initialization method
+     * It initializes VERSION Constant
+     * it handles exception in try catch block 
+     * and throws IOException
+     * 
+     */
+    
+    static {
+        String use = null;
+        try {
+            final InputStream is = CambriaApiVersionInfo.class
+                    .getResourceAsStream("/cambriaApiVersion.properties");
+            if (is != null) {
+               PROPS.load(is);
+                use = PROPS.getProperty("cambriaApiVersion", null);
+            }
+        } catch (IOException e) {
+            LOG.error("Failed due to IO EXception:"+e);
+        }
+        VERSION = use;
+    }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java
new file mode 100644 (file)
index 0000000..cba3696
--- /dev/null
@@ -0,0 +1,105 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends;
+
+
+/**
+ * A consumer interface. Consumers pull the next message from a given topic.
+ * @author peter
+ */
+public interface Consumer
+{      
+       /**
+        * A message interface provide the offset and message
+        * @author nilanjana.maity
+        *
+        */
+       public interface Message
+       {       
+               /**
+                * returning the offset of that particular message 
+                * @return long
+                */
+               long getOffset ();
+               /**
+                * returning the message 
+                * @return message
+                */
+               String getMessage ();
+       }
+
+       /**
+        * Get this consumer's name
+        * @return name
+        */
+       String getName ();
+
+       /**
+        * Get creation time in ms
+        * @return
+        */
+       long getCreateTimeMs ();
+
+       /**
+        * Get last access time in ms
+        * @return
+        */
+       long getLastAccessMs ();
+       
+       /**
+        * Get the next message from this source. This method must not block.
+        * @return the next message, or null if none are waiting
+        */
+       Message nextMessage ();
+
+       /**
+        * Get the next message from this source. This method must not block.
+        * @param atOffset start with the next message at or after atOffset. -1 means next from last request
+        * @return the next message, or null if none are waiting
+        */
+
+
+       
+       /**
+        * Close/clean up this consumer
+        * @return 
+        */
+       boolean close();
+       
+       /**
+        * Commit the offset of the last consumed message
+        * 
+        */
+       void commitOffsets();
+       
+       /**
+        * Get the offset this consumer is currently at
+        * @return offset
+        */
+       long getOffset();
+       
+       void setOffset(long offset);
+       
+       
+       
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java
new file mode 100644 (file)
index 0000000..56ab21b
--- /dev/null
@@ -0,0 +1,118 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+
+import java.util.Collection;
+import java.util.HashMap;
+
+/**
+ * This is the factory class to instantiate the consumer
+ * 
+ * @author nilanjana.maity
+ *
+ */
+
+public interface ConsumerFactory {
+       public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled";
+       public static boolean kDefault_IsCacheEnabled = true;
+
+       /**
+        * User defined exception for Unavailable Exception
+        * 
+        * @author nilanjana.maity
+        *
+        */
+       public class UnavailableException extends Exception {
+               /**
+                * Unavailable Exception with message
+                * 
+                * @param msg
+                */
+               public UnavailableException(String msg) {
+                       super(msg);
+               }
+
+               /**
+                * Unavailable Exception with the throwable object
+                * 
+                * @param t
+                */
+               public UnavailableException(Throwable t) {
+                       super(t);
+               }
+
+               /**
+                * Unavailable Exception with the message and cause
+                * 
+                * @param msg
+                * @param cause
+                */
+               public UnavailableException(String msg, Throwable cause) {
+                       super(msg, cause);
+               }
+
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * For admin use, drop all cached consumers.
+        */
+       public void dropCache();
+
+       /**
+        * Get or create a consumer for the given set of info (topic, group, id)
+        * 
+        * @param topic
+        * @param consumerGroupId
+        * @param clientId
+        * @param timeoutMs
+        * @return
+        * @throws UnavailableException
+        */
+       
+
+       /**
+        * For factories that employ a caching mechanism, this allows callers to
+        * explicitly destory a consumer that resides in the factory's cache.
+        * 
+        * @param topic
+        * @param consumerGroupId
+        * @param clientId
+        */
+       public void destroyConsumer(String topic, String consumerGroupId,
+                       String clientId);
+
+       /**
+        * For admin/debug, we provide access to the consumers
+        * 
+        * @return a collection of consumers
+        */
+       public Collection<? extends Consumer> getConsumers();
+
+       public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
+       public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
+
+
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java
new file mode 100644 (file)
index 0000000..5d5121a
--- /dev/null
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends;
+
+import com.att.nsa.metrics.CdmMetricsRegistry;
+
+/**
+ * This interface will help to generate metrics
+ * @author nilanjana.maity
+ *
+ */
+public interface MetricsSet extends CdmMetricsRegistry{
+
+       /**
+        * This method will setup cambria sender code
+        */
+       public void setupCambriaSender ();
+       /**
+        * This method will define on route complete
+        * @param name
+        * @param durationMs
+        */
+       public void onRouteComplete ( String name, long durationMs );
+       /**
+        * This method will help the kafka publisher while publishing the messages
+        * @param amount
+        */
+       public void publishTick ( int amount );
+       /**
+        * This method will help the kafka consumer while consuming the messages
+        * @param amount
+        */
+       public void consumeTick ( int amount );
+       /**
+        * This method will call if the kafka consumer cache missed 
+        */
+       public void onKafkaConsumerCacheMiss ();
+       /**
+        * This method will call if the kafka consumer cache will be hit while publishing/consuming the messages
+        */
+       public void onKafkaConsumerCacheHit ();
+       /**
+        * This method will call if the kafka consumer cache claimed
+        */
+       public void onKafkaConsumerClaimed ();
+       /**
+        * This method will call if Kafka consumer is timed out
+        */
+       public void onKafkaConsumerTimeout ();
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java
new file mode 100644 (file)
index 0000000..60b746d
--- /dev/null
@@ -0,0 +1,98 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends;
+
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A publisher interface. Publishers receive messages and post them to a topic.
+ * @author peter
+ */
+public interface Publisher
+{
+       /**
+        * A message interface. The message has a key and a body.
+        * @author peter
+        */
+       public interface message
+       {
+               /**
+                * Get the key for this message. The key is used to partition messages
+                * into "sub-streams" that have guaranteed order. The key can be null,
+                * which means the message can be processed without any concern for order.
+                * 
+                * @return a key, possibly null
+                */
+               String getKey();
+
+               /**
+                * Get the message body.
+                * @return a message body
+                */
+               String getMessage();
+               /**
+                * set the logging params for transaction enabled logging 
+                * @param logDetails
+                */
+               void setLogDetails (LogDetails logDetails);
+               /**
+                * Get the log details for transaction enabled logging
+                * @return LogDetails
+                */
+               LogDetails getLogDetails ();
+               
+               /**
+                * boolean transactionEnabled
+                * @return true/false
+                */
+               boolean isTransactionEnabled();
+               /**
+                * Set the transaction enabled flag from prop file or topic based implementation
+                * @param transactionEnabled
+                */
+               void setTransactionEnabled(boolean transactionEnabled);
+       }
+
+       /**
+        * Send a single message to a topic. Equivalent to sendMessages with a list of size 1.
+        * @param topic
+        * @param msg
+        * @throws IOException
+        */
+       public void sendMessage ( String topic, message msg ) throws IOException;
+
+       /**
+        * Send messages to a topic.
+        * @param topic
+        * @param msgs
+        * @throws IOException
+        */
+       public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
+       
+       public void sendBatchMessageNew(String topic ,ArrayList<ProducerRecord<String,String>> kms) throws IOException;
+       public void sendMessagesNew( String topic, List<? extends message> msgs ) throws IOException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java
new file mode 100644 (file)
index 0000000..4d6c81c
--- /dev/null
@@ -0,0 +1,394 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.commons.lang.StringUtils;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.KafkaException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Arrays;
+import java.util.concurrent.*;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ * 
+ * @author Ram
+ *
+ */
+public class Kafka011Consumer implements Consumer {
+       private enum State {
+               OPENED, CLOSED
+       }
+
+       
+       /**
+        * KafkaConsumer() is constructor. It has following 4 parameters:-
+        * 
+        * @param topic
+        * @param group
+        * @param id
+        * @param cc
+        * 
+        */
+
+       public Kafka011Consumer(String topic, String group, String id, KafkaConsumer<String, String> cc,
+                       KafkaLiveLockAvoider2 klla) throws Exception {
+               fTopic = topic;
+               fGroup = group;
+               fId = id;
+               fCreateTimeMs = System.currentTimeMillis();
+               fLastTouch = fCreateTimeMs;
+               fPendingMsgs = new LinkedBlockingQueue<>();
+               fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+               offset = 0;
+               state = State.OPENED;
+               kConsumer = cc;
+               fKafkaLiveLockAvoider = klla;
+
+               String consumerTimeOut = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "consumer.timeout");
+               if (StringUtils.isNotEmpty(consumerTimeOut)) {
+                       consumerPollTimeOut = Integer.parseInt(consumerTimeOut);
+               }
+               synchronized (kConsumer) {
+                       kConsumer.subscribe(Arrays.asList(topic));
+               }
+       }
+
+       private Message makeMessage(final ConsumerRecord<String, String> msg) {
+               return new Message() {
+                       @Override
+                       public long getOffset() {
+                               offset = msg.offset();
+                               return offset;
+                       }
+
+                       @Override
+                       public String getMessage() {
+                               return new String(msg.value());
+                       }
+               };
+       }
+
+       @Override
+       public synchronized Message nextMessage() {
+
+               try {
+                       if (!fPendingMsgs.isEmpty()) {
+                               return makeMessage(fPendingMsgs.take());
+                       }
+               } catch (InterruptedException x) {
+                       log.warn("After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage() + ")",
+                                       x);
+                       Thread.currentThread().interrupt();
+               }
+
+               Callable<Boolean> run = new Callable<Boolean>() {
+                       @Override
+                       public Boolean call() throws Exception {
+                               try {
+                                       ConsumerRecords<String, String> records;
+                                       synchronized (kConsumer) {
+                                               records = kConsumer.poll(500);
+                                       }
+                                       for (ConsumerRecord<String, String> record : records) {
+
+                                               fPendingMsgs.offer(record);
+                                       }
+
+                               } catch (KafkaException x) {
+                                       log.debug(fLogTag + ": KafkaException ", x);
+
+                               } catch (IllegalStateException | IllegalArgumentException x) {
+                                       log.error(fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. ", x);
+
+                               }
+
+
+                               return true;
+                       }
+               };
+
+               @SuppressWarnings({ "rawtypes", "unchecked" })
+               RunnableFuture future = new FutureTask(run);
+               ExecutorService service = Executors.newSingleThreadExecutor();
+               service.execute(future);
+               try {
+                       future.get(consumerPollTimeOut, TimeUnit.SECONDS); // wait 1
+                       // second
+               } catch (TimeoutException ex) {
+               log.error("TimeoutException in in Kafka consumer ", ex);
+                       // timed out. Try to stop the code if possible.
+                       String apiNodeId = null;
+                       try {
+                               apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
+                       } catch (UnknownHostException e1) {
+                               log.error("unable to get the localhost address ", e1);
+                       }
+
+                       try {
+                               if (fKafkaLiveLockAvoider != null)
+                                       fKafkaLiveLockAvoider.unlockConsumerGroup(apiNodeId, fTopic + "::" + fGroup);
+                       } catch (Exception e) {
+                               log.error("Exception in unlockConsumerGroup(" + apiNodeId + "," + fTopic + "::" + fGroup, e);
+                       }
+
+                       forcePollOnConsumer();
+                       future.cancel(true);
+               } catch (Exception ex) {
+            log.error("Exception in in Kafka consumer ", ex);
+                       // timed out. Try to stop the code if possible.
+                       future.cancel(true);
+               }
+               service.shutdown();
+
+               return null;
+
+       }
+
+       /**
+        * getName() method returns string type value. returns 3 parameters in
+        * string:- fTopic,fGroup,fId
+        *
+        * @Override
+        */
+       public String getName() {
+               return fTopic + " : " + fGroup + " : " + fId;
+       }
+
+       /**
+        * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
+        * variable value
+        *
+        * @Override
+        *
+        */
+       public long getCreateTimeMs() {
+               return fCreateTimeMs;
+       }
+
+       public KafkaConsumer<String, String> getConsumer() {
+               return kConsumer;
+       }
+
+       /**
+        * getLastAccessMs() method returns long type value. returns fLastTouch
+        * variable value
+        *
+        * @Override
+        *
+        */
+       public long getLastAccessMs() {
+               return fLastTouch;
+       }
+
+       /**
+        * getOffset() method returns long type value. returns offset variable value
+        *
+        * @Override
+        *
+        */
+       public long getOffset() {
+               return offset;
+       }
+
+       /**
+        * commit offsets commitOffsets() method will be called on closed of
+        * KafkaConsumer.
+        *
+        * @Override
+        *
+        *
+        *                      public void commitOffsets() { if (getState() ==
+        *           KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
+        *           on closed KafkaConsumer " + getName()); return; }
+        *           fConnector.commitOffsets(); }
+        */
+
+       /**
+        * updating fLastTouch with current time in ms
+        */
+       public void touch() {
+               fLastTouch = System.currentTimeMillis();
+       }
+
+       /**
+        * getLastTouch() method returns long type value. returns fLastTouch
+        * variable value
+        *
+        */
+       public long getLastTouch() {
+               return fLastTouch;
+       }
+
+       /**
+        * setting the kafkaConsumer state to closed
+        */
+
+       public boolean close() {
+               if (getState() == State.CLOSED) {
+
+                       log.error("close() called on closed KafkaConsumer " + getName());
+                       return true;
+               }
+
+
+               boolean retVal = kafkaConnectorshuttask();
+               return retVal;
+
+       }
+
+       /* time out if the kafka shutdown fails for some reason */
+
+       private boolean kafkaConnectorshuttask() {
+               Callable<Boolean> run = new Callable<Boolean>() {
+                       @Override
+                       public Boolean call() throws Exception {
+
+                               try {
+
+                                       kConsumer.close();
+
+                               } catch (Exception e) {
+                                       log.info("@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
+                                       throw new Exception("@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
+
+                               }
+                               log.info("Kafka connection closure with in 15 seconds by a Executors task");
+
+                               return true;
+                       }
+               };
+
+               @SuppressWarnings({ "rawtypes", "unchecked" })
+               RunnableFuture future = new FutureTask(run);
+               ExecutorService service = Executors.newSingleThreadExecutor();
+               service.execute(future);
+               try {
+                  future.get(200, TimeUnit.SECONDS); // wait 1
+                       // second
+               } catch (TimeoutException ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.info("Timeout Occured - Kafka connection closure with in 300 seconds by a Executors task ", ex);
+                       future.cancel(true);
+                       setState(State.OPENED);
+               } catch (Exception ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.error("Exception Occured - Kafka connection closure with in 300 seconds by a Executors task ", ex);
+                       future.cancel(true);
+                       setState(State.OPENED);
+                       return false;
+               }
+               service.shutdown();
+               setState(State.CLOSED);
+               return true;
+       }
+
+       public void forcePollOnConsumer() {
+               Kafka011ConsumerUtil.forcePollOnConsumer(fTopic, fGroup, fId);
+
+       }
+
+       /**
+        * getConsumerGroup() returns Consumer group
+        *
+        * @return
+        */
+       public String getConsumerGroup() {
+               return fGroup;
+       }
+
+       /**
+        * getConsumerId returns Consumer Id
+        *
+        * @return
+        */
+       public String getConsumerId() {
+               return fId;
+       }
+
+       /**
+        * getState returns kafkaconsumer state
+        *
+        * @return
+        */
+       private State getState() {
+               return this.state;
+       }
+
+       /**
+        * setState() sets the kafkaConsumer state
+        *
+        * @param state
+        */
+       private void setState(State state) {
+               this.state = state;
+       }
+
+
+       private final String fTopic;
+       private final String fGroup;
+       private final String fId;
+       private final String fLogTag;
+
+       private KafkaConsumer<String, String> kConsumer;
+       private long fCreateTimeMs;
+       private long fLastTouch;
+       private long offset;
+       private State state;
+       private KafkaLiveLockAvoider2 fKafkaLiveLockAvoider;
+       private int consumerPollTimeOut=5;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011Consumer.class);
+       private final LinkedBlockingQueue<ConsumerRecord<String, String>> fPendingMsgs;
+
+       @Override
+       public void commitOffsets() {
+               if (getState() == State.CLOSED) {
+                       log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+                       return;
+               }
+               kConsumer.commitSync();
+               
+
+       }
+
+       @Override
+       public void setOffset(long offsetval) {
+               offset = offsetval;
+       }
+
+       
+       public void setConsumerCache(KafkaConsumerCache cache) {
+       }
+
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java
new file mode 100644 (file)
index 0000000..c66904b
--- /dev/null
@@ -0,0 +1,120 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+import java.util.ArrayList;
+
+/**
+ * A consumer Util class for force polling when a rebalance issue is anticipated
+ * 
+ * @author Ram
+ *
+ */
+public class Kafka011ConsumerUtil {
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011ConsumerUtil.class);
+
+       /**
+        * @param fconsumercache
+        * @param fTopic
+        * @param fGroup
+        * @param fId
+        * @return
+        */
+       public static boolean forcePollOnConsumer(final String fTopic, final String fGroup, final String fId) {
+
+               Thread forcepollThread = new Thread(new Runnable() {
+                       public void run() {
+                               try {
+
+                                       ArrayList<Kafka011Consumer> kcsList = null;
+
+                                       kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(fTopic + "::" + fGroup + "::", fId);
+                                       if (null != kcsList) {
+                                               for (int counter = 0; counter < kcsList.size(); counter++) {
+
+                                                       Kafka011Consumer kc1 = kcsList.get(counter);
+
+                                                       try {
+                                                               ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
+                                                               log.info("soft poll on " + kc1);
+                                                       } catch (java.util.ConcurrentModificationException e) {
+                                                               log.error("Error occurs for " + e);
+                                                       }
+
+                                               }
+
+                                       }
+
+                               } catch (Exception e) {
+                                       log.error("Failed and go to Exception block for " + fGroup +" ", e);
+                               }
+                       }
+               });
+
+               forcepollThread.start();
+
+               return false;
+
+       }
+
+       /**
+        * @param fconsumercache
+        * @param group
+        * @return
+        */
+       public static boolean forcePollOnConsumer(final String group) {
+
+               Thread forcepollThread = new Thread(new Runnable() {
+                       public void run() {
+                               try {
+                                       ArrayList<Kafka011Consumer> kcsList = new ArrayList<Kafka011Consumer>();
+                                       kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(group);
+
+                                       if (null != kcsList) {
+
+                                               for (int counter = 0; counter < kcsList.size(); counter++) {
+
+                                                       Kafka011Consumer kc1 = kcsList.get(counter);
+                                                       log.info("soft poll on remote nodes " + kc1);
+                                                       ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
+                                               }
+
+                                       }
+
+                               } catch (java.util.ConcurrentModificationException e) {
+                                       log.error("Error occurs for ", e);
+                               } catch (Exception e) {
+                                       log.error("Failed and go to Exception block for " + group + " ", e);
+                               }
+                       }
+               });
+
+               forcepollThread.start();
+               return false;
+
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt
new file mode 100644 (file)
index 0000000..dd6259f
--- /dev/null
@@ -0,0 +1,386 @@
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.Arrays;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.KafkaException;
+
+import com.att.dmf.mr.backends.Consumer;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ * 
+ * @author peter
+ *
+ */
+public class KafkaConsumer implements Consumer {
+       private enum State {
+               OPENED, CLOSED
+       }
+
+       /**
+        * KafkaConsumer() is constructor. It has following 4 parameters:-
+        * 
+        * @param topic
+        * @param group
+        * @param id
+        * @param cc
+        * 
+        */
+
+       public KafkaConsumer(String topic, String group, String id, Properties prop) throws Exception {
+               fTopic = topic;
+               fGroup = group;
+               fId = id;
+               // fConnector = cc;
+
+               fCreateTimeMs = System.currentTimeMillis();
+               fLastTouch = fCreateTimeMs;
+               fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String,String>> ();
+               fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+               offset = 0;
+
+               state = KafkaConsumer.State.OPENED;
+
+               // final Map<String, Integer> topicCountMap = new HashMap<String,
+               // Integer>();
+               // topicCountMap.put(fTopic, 1);
+               // log.info(fLogTag +" kafka Consumer started at "
+               // +System.currentTimeMillis());
+               // final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
+               // fConnector.createMessageStreams(topicCountMap);
+               // final List<KafkaStream<byte[], byte[]>> streams =
+               // consumerMap.get(fTopic);
+
+               kConsumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(prop);
+               // System.out.println("I am in Consumer APP " + topic + "-- " +
+               // fConsumer);
+               kConsumer.subscribe(Arrays.asList(topic));
+               log.info(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+       System.out.println("-----id " +id);
+       
+               
+         try { ConsumerRecords<String, String> records =
+                                                  kConsumer.poll(500); System.out.println("---" +
+                                                   records.count());
+                                         
+                                                   for (ConsumerRecord<String, String> record : records) {
+                                                   System.out.printf("offset = %d, key = %s, value = %s",
+                                                   record.offset(), record.key(), record.value()); String t =
+                                                   record.value();
+                                         
+                                                   }
+                 }catch(Exception e){
+                         System.out.println( e);
+                 }
+                       System.out.println(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+                       kConsumer.commitSync();
+               //  fConsumer.close();  
+       
+
+               /*
+                * ConsumerRecords<String, String> records = fConsumer.poll(500);
+                * System.out.println("---" + records.count());
+                * 
+                * for (ConsumerRecord<String, String> record : records) {
+                * System.out.printf("offset = %d, key = %s, value = %s",
+                * record.offset(), record.key(), record.value()); String t =
+                * record.value();
+                * 
+                * }
+                * 
+                * 
+                * fConsumer.commitSync(); fConsumer.close();
+                */
+
+               // fStream = streams.iterator().next();
+       }
+       
+       
+       
+       private Consumer.Message makeMessage ( final ConsumerRecord<String,String> msg )
+       {
+               return new Consumer.Message()
+               {
+                       @Override
+                       public long getOffset ()
+                       {
+                               return msg.offset ();
+                       }
+                       
+                       @Override
+                       public String getMessage ()
+                       {
+                               return new String ( msg.value () );
+                       }
+               };
+       }
+       
+       @Override
+       public synchronized Consumer.Message nextMessage ()
+       {
+               
+               try
+               {
+                       if ( fPendingMsgs.size () > 0 )
+                       {
+                               return makeMessage ( fPendingMsgs.take () );
+                       }
+               }
+               catch ( InterruptedException x )
+               {
+                       log.warn ( "After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage () + ")", x );
+               }
+               
+               
+                       try
+                       {
+                               boolean foundMsgs = false;
+                               System.out.println("entering into pollingWWWWWWWWWWWWWWWWW");
+                               final ConsumerRecords<String,String> records = kConsumer.poll ( 100 );
+                               System.out.println("polling doneXXXXXXXXXXXXXXXXXXXXXXXXXXX....");
+                               for ( ConsumerRecord<String,String> record : records )
+                               {
+                                       foundMsgs = true;
+                                       fPendingMsgs.offer ( record );
+                               }
+                       
+                       }
+                       catch ( KafkaException x )
+                       {
+                               log.debug ( fLogTag + ": KafkaException " + x.getMessage () );
+                               
+                       }
+                       catch ( java.lang.IllegalStateException | java.lang.IllegalArgumentException x )
+                       {
+                               log.error ( fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + x.getMessage () );
+                       
+                       }
+                               
+               return null;
+       }
+       
+       
+
+       /**
+        * getName() method returns string type value. returns 3 parameters in
+        * string:- fTopic,fGroup,fId
+        * 
+        * @Override
+        */
+       public String getName() {
+               return fTopic + " : " + fGroup + " : " + fId;
+       }
+
+       /**
+        * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
+        * variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getCreateTimeMs() {
+               return fCreateTimeMs;
+       }
+
+       public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() {
+               return kConsumer;
+       }
+
+       /**
+        * getLastAccessMs() method returns long type value. returns fLastTouch
+        * variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getLastAccessMs() {
+               return fLastTouch;
+       }
+
+       
+
+       /**
+        * getOffset() method returns long type value. returns offset variable value
+        * 
+        * @Override
+        * 
+        */
+       public long getOffset() {
+               return offset;
+       }
+
+       /**
+        * commit offsets commitOffsets() method will be called on closed of
+        * KafkaConsumer.
+        * 
+        * @Override
+        * 
+        *
+        *                      public void commitOffsets() { if (getState() ==
+        *           KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
+        *           on closed KafkaConsumer " + getName()); return; }
+        *           fConnector.commitOffsets(); }
+        */
+
+       /**
+        * updating fLastTouch with current time in ms
+        */
+       public void touch() {
+               fLastTouch = System.currentTimeMillis();
+       }
+
+       /**
+        * getLastTouch() method returns long type value. returns fLastTouch
+        * variable value
+        * 
+        */
+       public long getLastTouch() {
+               return fLastTouch;
+       }
+
+       /**
+        * setting the kafkaConsumer state to closed
+        */
+       public synchronized boolean close() {
+
+               if (getState() == KafkaConsumer.State.CLOSED) {
+
+                       log.warn("close() called on closed KafkaConsumer " + getName());
+                       return true;
+               }
+
+               setState(KafkaConsumer.State.CLOSED);
+               // fConnector.shutdown();
+               boolean retVal = kafkaConnectorshuttask();
+               return retVal;
+
+       }
+
+       /* time out if the kafka shutdown fails for some reason */
+
+       private boolean kafkaConnectorshuttask() {
+               Callable<Boolean> run = new Callable<Boolean>() {
+                       @Override
+                       public Boolean call() throws Exception {
+                               // your code to be timed
+                               try {
+                               System.out.println("consumer closing....." + kConsumer);
+                                       kConsumer.close();
+                               } catch (Exception e) {
+                                       log.info("@@@@@@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
+                               }
+                               log.info("Kafka connection closure with in 15 seconds by a Executors task");
+                               return true;
+                       }
+               };
+
+               RunnableFuture future = new FutureTask(run);
+               ExecutorService service = Executors.newSingleThreadExecutor();
+               service.execute(future);
+               Boolean result = null;
+               try {
+                       result = (Boolean) future.get(15, TimeUnit.SECONDS); // wait 1
+                                                                                                                                       // second
+               } catch (TimeoutException ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task");
+                       future.cancel(true);
+               } catch (Exception ex) {
+                       // timed out. Try to stop the code if possible.
+                       log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task" + ex);
+                       future.cancel(true);
+                       return false;
+               }
+               service.shutdown();
+               return true;
+       }
+
+       /**
+        * getConsumerGroup() returns Consumer group
+        * 
+        * @return
+        */
+       public String getConsumerGroup() {
+               return fGroup;
+       }
+
+       /**
+        * getConsumerId returns Consumer Id
+        * 
+        * @return
+        */
+       public String getConsumerId() {
+               return fId;
+       }
+
+       /**
+        * getState returns kafkaconsumer state
+        * 
+        * @return
+        */
+       private KafkaConsumer.State getState() {
+               return this.state;
+       }
+
+       /**
+        * setState() sets the kafkaConsumer state
+        * 
+        * @param state
+        */
+       private void setState(KafkaConsumer.State state) {
+               this.state = state;
+       }
+
+       // private ConsumerConnector fConnector;
+       private final String fTopic;
+       private final String fGroup;
+       private final String fId;
+       private final String fLogTag;
+       // private final KafkaStream<byte[], byte[]> fStream;
+       private final org.apache.kafka.clients.consumer.KafkaConsumer<String, String> kConsumer;
+       private long fCreateTimeMs;
+       private long fLastTouch;
+       private long offset;
+       private KafkaConsumer.State state;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
+       private final LinkedBlockingQueue<ConsumerRecord<String,String>> fPendingMsgs;
+       // private static final Logger log =
+       // LoggerFactory.getLogger(KafkaConsumer.class);
+
+       @Override
+       public void commitOffsets() {
+               if (getState() == KafkaConsumer.State.CLOSED) {
+                       log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+                       return;
+               }
+               kConsumer.commitSync();
+               // fConsumer.close();
+
+       }
+
+
+
+       @Override
+       public void setOffset(long offsetval) {
+               // TODO Auto-generated method stub
+               offset = offsetval;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java
new file mode 100644 (file)
index 0000000..ac68a11
--- /dev/null
@@ -0,0 +1,729 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+import com.att.aft.dme2.internal.springframework.beans.factory.annotation.Autowired;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.metrics.CdmTimer;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import org.I0Itec.zkclient.exception.ZkException;
+import org.I0Itec.zkclient.exception.ZkInterruptedException;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.imps.CuratorFrameworkState;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.PathChildrenCache;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
+import org.apache.curator.framework.state.ConnectionState;
+import org.apache.curator.framework.state.ConnectionStateListener;
+import org.apache.curator.utils.EnsurePath;
+import org.apache.curator.utils.ZKPaths;
+import org.apache.http.annotation.NotThreadSafe;
+import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+
+/**
+ * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
+ *                must be
+ * @author peter
+ *
+ */
+@NotThreadSafe
+public class KafkaConsumerCache {
+
+       private static KafkaConsumerCache kafkaconscache = null;
+
+       public static KafkaConsumerCache getInstance() {
+               if (kafkaconscache == null)
+                       kafkaconscache = new KafkaConsumerCache();
+
+               return kafkaconscache;
+       }
+
+       private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
+       private static final int kDefault_ConsumerHandoverWaitMs = 500;
+
+       private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds";
+       private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs";
+
+       private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath";
+       private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache";
+
+       // kafka defaults to timing out a client after 6 seconds of inactivity, but
+       // it heartbeats even when the client isn't fetching. Here, we don't
+       // want to prematurely rebalance the consumer group. Assuming clients are
+       // hitting
+       // the server at least every 30 seconds, timing out after 2 minutes should
+       // be okay.
+       // FIXME: consider allowing the client to specify its expected call rate?
+       private static final long kDefault_MustTouchEveryMs = 1000L*60*2;
+
+       // check for expirations pretty regularly
+       private static final long kDefault_SweepEverySeconds = 15;
+
+       private enum Status {
+               NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
+       }
+
+       
+       
+
+       @Autowired
+       private DMaaPErrorMessages errorMessages;
+
+       
+       /**
+        * User defined exception class for kafka consumer cache
+        * 
+        * @author nilanjana.maity
+        *
+        */
+       public class KafkaConsumerCacheException extends Exception {
+               /**
+                * To throw the exception
+                * 
+                * @param t
+                */
+               KafkaConsumerCacheException(Throwable t) {
+                       super(t);
+               }
+
+               /**
+                * 
+                * @param s
+                */
+               public KafkaConsumerCacheException(String s) {
+                       super(s);
+               }
+
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * Creates a KafkaConsumerCache object. Before it is used, you must call
+        * startCache()
+        *
+        */
+       public KafkaConsumerCache() {
+
+               String strkSetting_ZkBasePath = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               kSetting_ZkBasePath);
+               if (null == strkSetting_ZkBasePath)
+                       strkSetting_ZkBasePath = kDefault_ZkBasePath;
+               fBaseZkPath = strkSetting_ZkBasePath;
+
+               fConsumers = new ConcurrentHashMap<>();
+               fSweepScheduler = Executors.newScheduledThreadPool(1);
+
+               curatorConsumerCache = null;
+
+               status = Status.NOT_STARTED;
+               // Watcher for consumer rebalancing across nodes. Kafka011 rebalancing
+               // work around
+
+               listener = new ConnectionStateListener() {
+                       public void stateChanged(CuratorFramework client, ConnectionState newState) {
+                               if (newState == ConnectionState.LOST) {
+
+                                       log.info("ZooKeeper connection expired");
+                                       handleConnectionLoss();
+                               } else if (newState == ConnectionState.READ_ONLY) {
+                                       log.warn("ZooKeeper connection set to read only mode.");
+                               } else if (newState == ConnectionState.RECONNECTED) {
+                                       log.info("ZooKeeper connection re-established");
+                                       handleReconnection();
+                               } else if (newState == ConnectionState.SUSPENDED) {
+                                       log.warn("ZooKeeper connection has been suspended.");
+                                       handleConnectionSuspended();
+                               }
+                       }
+               };
+       }
+
+       /**
+        * Start the cache service. This must be called before any get/put
+        * operations.
+        * 
+        * @param mode
+        *            DMAAP or cambria
+        * @param curator
+        * @throws IOException
+        * @throws KafkaConsumerCacheException
+        */
+       public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
+
+               if (fApiId == null) {
+                       throw new IllegalArgumentException("API Node ID must be specified.");
+               }
+
+               try {
+
+                       if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
+                               curator = getCuratorFramework(curator);
+                       }
+                       curator.getConnectionStateListenable().addListener(listener);
+                       setStatus(Status.CONNECTED);
+                       curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
+                       curatorConsumerCache.start();
+                       curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
+                               public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
+                                       switch (event.getType()) {
+                                       case CHILD_ADDED: {
+                                               try {
+                                                       final String apiId = new String(event.getData().getData());
+                                                       final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+                                                       log.info(apiId + " started consumer " + consumer);
+                                               } catch (Exception ex) {
+                                                       log.info("#Error Occured during Adding child" + ex);
+                                               }
+                                               break;
+                                       }
+                                       case CHILD_UPDATED: {
+                                               final String apiId = new String(event.getData().getData());
+                                               final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+                                               if (fConsumers.containsKey(consumer)) {
+                                                       log.info(apiId + " claimed consumer " + consumer + " from " + fApiId
+                                                                       + " but wont hand over");
+                                                       // Commented so that it dont give the connection
+                                                       // until the active node is running for this client
+                                                       // id.
+                                                       dropClaimedConsumer(consumer);
+                                               }
+
+                                               break;
+                                       }
+                                       case CHILD_REMOVED: {
+                                               final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+                                               if (fConsumers.containsKey(consumer)) {
+                                                       log.info("Someone wanted consumer " + consumer
+                                                                       + " gone;  but not removing it from the cache");
+                                                       dropConsumer(consumer, false);
+                                               }
+
+                                               break;
+                                       }
+
+                                       default:
+                                               break;
+                                       }
+                               }
+                       });
+
+                       // initialize the ZK path
+                       EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
+                       ensurePath.ensure(curator.getZookeeperClient());
+
+                       
+                       
+                       long freq = kDefault_SweepEverySeconds;
+                       String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       kSetting_SweepEverySeconds);
+                       if (null != strkSetting_SweepEverySeconds) {
+                               freq = Long.parseLong(strkSetting_SweepEverySeconds);
+                       }
+
+                       fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
+                       log.info("KafkaConsumerCache started");
+                       log.info("sweeping cached clients every " + freq + " seconds");
+               } catch (ZkException e) {
+                       log.error("@@@@@@ ZK Exception occured for  " + e);
+                       throw new KafkaConsumerCacheException(e);
+               } catch (Exception e) {
+                       log.error("@@@@@@  Exception occured for  " + e);
+                       throw new KafkaConsumerCacheException(e);
+               }
+       }
+
+       /**
+        * Getting the curator oject to start the zookeeper connection estabished
+        * 
+        * @param curator
+        * @return curator object
+        */
+       public static CuratorFramework getCuratorFramework(CuratorFramework curator) {
+               if (curator.getState() == CuratorFrameworkState.LATENT) {
+                       curator.start();
+
+                       try {
+                               curator.blockUntilConnected();
+                       } catch (InterruptedException e) {
+                               log.error("error while setting curator framework :",e);
+                               Thread.currentThread().interrupt();
+                       }
+               }
+
+               return curator;
+       }
+
+       /**
+        * Stop the cache service.
+        */
+       public void stopCache() {
+               setStatus(Status.DISCONNECTED);
+
+               final CuratorFramework curator = ConfigurationReader.getCurator();
+
+               if (curator != null) {
+                       try {
+                               curator.getConnectionStateListenable().removeListener(listener);
+                               curatorConsumerCache.close();
+                               log.info("Curator client closed");
+                       } catch (ZkInterruptedException e) {
+                               log.warn("Curator client close interrupted: ", e);
+                       } catch (IOException e) {
+                               log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache ", e);
+                       }
+
+                       curatorConsumerCache = null;
+               }
+
+               if (fSweepScheduler != null) {
+                       fSweepScheduler.shutdownNow();
+                       log.info("cache sweeper stopped");
+               }
+
+               if (fConsumers != null) {
+                       fConsumers.clear();
+                       fConsumers = null;
+               }
+
+               setStatus(Status.NOT_STARTED);
+
+               log.info("Consumer cache service stopped");
+       }
+
+       /**
+        * Get a cached consumer by topic, group, and id, if it exists (and remains
+        * valid) In addition, this method waits for all other consumer caches in
+        * the cluster to release their ownership and delete their version of this
+        * consumer.
+        * 
+        * @param topic
+        * @param consumerGroupId
+        * @param clientId
+        * @return a consumer, or null
+        */
+       public Kafka011Consumer getConsumerFor(String topic, String consumerGroupId, String clientId)
+                       throws KafkaConsumerCacheException {
+               if (getStatus() != Status.CONNECTED)
+                       throw new KafkaConsumerCacheException("The cache service is unavailable.");
+
+               final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
+               final Kafka011Consumer kc = fConsumers.get(consumerKey);
+
+               if (kc != null) {
+                       log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
+                       kc.touch();
+                       fMetrics.onKafkaConsumerCacheHit();
+               } else {
+                       log.debug("Consumer cache miss for [" + consumerKey + "]");
+                       fMetrics.onKafkaConsumerCacheMiss();
+               }
+
+               return kc;
+       }
+
+       /**
+        * Get a cached consumer by topic, group, and id, if it exists (and remains
+        * valid) In addition, this method waits for all other consumer caches in
+        * the cluster to release their ownership and delete their version of this
+        * consumer.
+        *
+        * @param topicgroup
+        * @param clientId
+        * @return a consumer, or null
+        */
+       public ArrayList<Kafka011Consumer> getConsumerListForCG(String topicgroup, String clientId)
+                       throws KafkaConsumerCacheException {
+               if (getStatus() != Status.CONNECTED)
+                       throw new KafkaConsumerCacheException("The cache service is unavailable.");
+               ArrayList<Kafka011Consumer> kcl = new ArrayList<>();
+
+
+               Enumeration<String> strEnum = fConsumers.keys();
+               String consumerLocalKey = null;
+               while (strEnum.hasMoreElements()) {
+                       consumerLocalKey = strEnum.nextElement();
+
+                       if (consumerLocalKey.startsWith(topicgroup) && (!consumerLocalKey.endsWith("::" + clientId))) {
+
+
+
+
+                               kcl.add(fConsumers.get(consumerLocalKey));
+
+                       }
+               }
+
+               return kcl;
+       }
+
+       public ArrayList<Kafka011Consumer> getConsumerListForCG(String group) throws KafkaConsumerCacheException {
+               if (getStatus() != Status.CONNECTED)
+                       throw new KafkaConsumerCacheException("The cache service is unavailable.");
+               ArrayList<Kafka011Consumer> kcl = new ArrayList<>();
+
+               Enumeration<String> strEnum = fConsumers.keys();
+               String consumerLocalKey = null;
+               while (strEnum.hasMoreElements()) {
+                       consumerLocalKey = strEnum.nextElement();
+
+                       if (consumerLocalKey.startsWith(group)) {
+
+
+                               kcl.add(fConsumers.get(consumerLocalKey));
+
+                       }
+               }
+
+               return kcl;
+       }
+
+       /**
+        * Put a consumer into the cache by topic, group and ID
+        *
+        * @param topic
+        * @param consumerGroupId
+        * @param consumerId
+        * @param consumer
+        * @throws KafkaConsumerCacheException
+        */
+       public void putConsumerFor(String topic, String consumerGroupId, String consumerId, Kafka011Consumer consumer)
+                       throws KafkaConsumerCacheException {
+               if (getStatus() != Status.CONNECTED)
+                       throw new KafkaConsumerCacheException("The cache service is unavailable.");
+
+               final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+               fConsumers.put(consumerKey, consumer);
+
+
+
+               log.info("^@ Consumer Added to Cache Consumer Key" + consumerKey + " ApiId" + fApiId);
+       }
+
+       public Collection<? extends Consumer> getConsumers() {
+               return new LinkedList<>(fConsumers.values());
+       }
+
+       /**
+        * This method is to drop all the consumer
+        */
+       public void dropAllConsumers() {
+               for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
+                       dropConsumer(entry.getKey(), true);
+               }
+
+               // consumers should be empty here
+               if (fConsumers.size() > 0) {
+                       log.warn("During dropAllConsumers, the consumer map is not empty.");
+                       fConsumers.clear();
+               }
+       }
+
+       /**
+        * Drop a consumer from our cache due to a timeout
+        *
+        * @param key
+        */
+       private void dropTimedOutConsumer(String key) {
+               fMetrics.onKafkaConsumerTimeout();
+
+               if (!fConsumers.containsKey(key)) {
+                       log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key);
+                       return;
+               }
+
+               // First, drop this consumer from our cache
+               boolean isdrop = dropConsumer(key, true);
+               if (!isdrop) {
+                       return;
+               }
+               final CuratorFramework curator = ConfigurationReader.getCurator();
+
+               try {
+                       curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
+                       log.info(" ^ deleted " + fBaseZkPath + "/" + key);
+               } catch (NoNodeException e) {
+                       log.warn("A consumer was deleted from " + fApiId
+                                       + "'s cache, but no Cambria API node had ownership of it in ZooKeeper ", e);
+               } catch (Exception e) {
+                       log.debug("Unexpected exception while deleting consumer: ", e);
+                       log.info(" %%%%%%@# Unexpected exception while deleting consumer: ", e);
+               }
+
+               try {
+                       int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+                       String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       kSetting_ConsumerHandoverWaitMs);
+                       if (strkSetting_ConsumerHandoverWaitMs != null)
+                               consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+                       Thread.sleep(consumerHandoverWaitMs);
+               } catch (InterruptedException e) {
+                       log.error("InterruptedException in dropTimedOutConsumer",e);
+                       Thread.currentThread().interrupt();
+               }
+               log.info("Dropped " + key + " consumer due to timeout");
+       }
+
+       /**
+        * Drop a consumer from our cache due to another API node claiming it as
+        * their own.
+        *
+        * @param key
+        */
+       private void dropClaimedConsumer(String key) {
+               // if the consumer is still in our cache, it implies a claim.
+               if (fConsumers.containsKey(key)) {
+                       fMetrics.onKafkaConsumerClaimed();
+                       log.info("Consumer [" + key + "] claimed by another node.");
+               }
+               log.info("^dropping claimed Kafka consumer " + key);
+               dropConsumer(key, false);
+       }
+
+       /**
+        * Removes the consumer from the cache and closes its connection to the
+        * kafka broker(s).
+        *
+        * @param key
+        * @param dueToTimeout
+        */
+       private boolean dropConsumer(String key, boolean dueToTimeout) {
+               final Kafka011Consumer kc = fConsumers.get(key);
+               log.info("closing Kafka consumer " + key + " object " + kc);
+               if (kc != null) {
+
+                       if (kc.close()) {
+                               fConsumers.remove(key);
+
+                       } else {
+                               return false;
+                       }
+               }
+               return true;
+       }
+
+       // private final rrNvReadable fSettings;
+       private MetricsSet fMetrics;
+       private final String fBaseZkPath;
+       private final ScheduledExecutorService fSweepScheduler;
+       private String fApiId;
+
+       public void setfMetrics(final MetricsSet metrics) {
+               this.fMetrics = metrics;
+       }
+
+       public void setfApiId(final String id) {
+               this.fApiId = id;
+       }
+
+       private final ConnectionStateListener listener;
+
+       private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;
+       private PathChildrenCache curatorConsumerCache;
+
+       private volatile Status status;
+
+       private void handleReconnection() {
+
+               log.info("Reading current cache data from ZK and synchronizing local cache");
+               final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
+               // Remove all the consumers in this API nodes cache that now belong to
+               // other API nodes.
+               for (ChildData cachedConsumer : cacheData) {
+                       final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
+                       final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
+                                       : "undefined";
+                       if (!fApiId.equals(owningApiId)) {
+                               fConsumers.remove(consumerId); // Commented to avoid removing
+                               // the value cache hashmap but the lock still exists.
+                               // This is not considered in kafka consumer Factory
+                               log.info("@@@ Validating current cache data from ZK and synchronizing local cache" + owningApiId
+                                               + " removing " + consumerId);
+                       }
+               }
+
+               setStatus(Status.CONNECTED);
+       }
+
+       private void handleConnectionSuspended() {
+               log.info("Suspending cache until ZK connection is re-established");
+
+               setStatus(Status.SUSPENDED);
+       }
+
+       private void handleConnectionLoss() {
+               log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)");
+
+               setStatus(Status.DISCONNECTED);
+
+               closeAllCachedConsumers();
+               fConsumers.clear();
+       }
+
+       private void closeAllCachedConsumers() {
+               for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
+                       try {
+                               entry.getValue().close();
+                       } catch (Exception e) {
+                               log.info("@@@@@@ Error occurd while closing Clearing All cache " + e);
+                       }
+               }
+       }
+
+       private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) {
+               return topic + "::" + consumerGroupId + "::" + clientId;
+       }
+
+       /**
+        * This method is to get a lock
+        *
+        * @param topic
+        * @param consumerGroupId
+        * @param consumerId
+        * @throws KafkaConsumerCacheException
+        */
+       public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId)
+                       throws KafkaConsumerCacheException {
+               // get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
+               final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+
+               try(final CdmTimer timer = new CdmTimer(fMetrics, "CacheSignalOwnership")) {
+                       final String consumerPath = fBaseZkPath + "/" + consumerKey;
+                       log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
+                       final CuratorFramework curator = ConfigurationReader.getCurator();
+
+                       try {
+                               curator.setData().forPath(consumerPath, fApiId.getBytes());
+                       } catch (NoNodeException e) {
+                           log.info("KeeperException.NoNodeException occured", e);
+                               curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
+                       }
+                       log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
+                       timer.end();
+               } catch (Exception e) {
+                       log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
+                       throw new KafkaConsumerCacheException(e);
+               }
+
+               log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
+
+               try {
+                       int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+                       String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       kSetting_ConsumerHandoverWaitMs);
+                       if (strkSetting_ConsumerHandoverWaitMs != null)
+                               consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+                       Thread.sleep(consumerHandoverWaitMs);
+               } catch (InterruptedException e) {
+                       log.error("InterruptedException in signalOwnership",e);
+                       //Thread.currentThread().interrupt();
+               }
+       }
+
+       public KafkaLiveLockAvoider2 getkafkaLiveLockAvoiderObj() {
+               return null;
+       }
+
+       public void sweep() {
+               final LinkedList<String> removals = new LinkedList<String>();
+               long mustTouchEveryMs = kDefault_MustTouchEveryMs;
+               String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               kSetting_TouchEveryMs);
+               if (null != strkSetting_TouchEveryMs) {
+                       mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);
+               }
+
+               
+               final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
+
+               for (Entry<String, Kafka011Consumer> e : fConsumers.entrySet()) {
+                       final long lastTouchMs = e.getValue().getLastTouch();
+                       log.debug("consumer #####1" + e.getKey() + "    " + lastTouchMs + " < " + oldestAllowedTouchMs);
+
+                       if (lastTouchMs < oldestAllowedTouchMs) {
+                               log.info("consumer " + e.getKey() + " has expired");
+                               removals.add(e.getKey());
+                       }
+               }
+
+               for (String key : removals) {
+                       dropTimedOutConsumer(key);
+               }
+       }
+
+       /**
+        * Creating a thread to run the sweep method
+        * 
+        * @author nilanjana.maity
+        *
+        */
+       private class sweeper implements Runnable {
+               /**
+                * run method
+                */
+               public void run() {
+                       sweep();
+               }
+       }
+
+       /**
+        * This method is to drop consumer
+        * 
+        * @param topic
+        * @param consumerGroup
+        * @param clientId
+        */
+       public void dropConsumer(String topic, String consumerGroup, String clientId) {
+               dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false);
+       }
+
+       private Status getStatus() {
+               return this.status;
+       }
+
+       private void setStatus(Status status) {
+               this.status = status;
+       }
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
+       
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java
new file mode 100644 (file)
index 0000000..66ecc84
--- /dev/null
@@ -0,0 +1,158 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.Watcher;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+//@ComponentScan(basePackages="com.att.dmf.mr.backends.kafka")
+@Component
+public class KafkaLiveLockAvoider2 {
+       
+       public static final String ZNODE_ROOT = "/live-lock-avoid";
+       public static final String ZNODE_LOCKS = "/locks";
+       public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks";
+       
+       private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS;
+       private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaLiveLockAvoider2.class.getName());
+       
+       @Autowired
+       @Qualifier("curator")   
+       private CuratorFramework curatorFramework;
+       
+   @PostConstruct
+       public void init() {
+        log.info("Welcome......................................................................................");
+       try {
+               if (curatorFramework.checkExists().forPath(locksPath) == null) {
+                       curatorFramework.create().creatingParentsIfNeeded().forPath(locksPath);
+               }
+               if (curatorFramework.checkExists().forPath(tasksPath) == null) {
+                       curatorFramework.create().creatingParentsIfNeeded().forPath(tasksPath);
+               }
+               
+       } catch (Exception e) {
+               
+               log.error("Error during creation of permanent Znodes under /live-lock-avoid ",e);
+               
+       }
+       
+               
+       }
+       public void unlockConsumerGroup(String appId, String groupName) throws Exception {
+               
+               log.info("Signalling unlock to all conumsers of in group [{}] now, " ,  groupName);
+               
+               String fullLockPath = String.format("%s/%s", locksPath, groupName );
+               String fullTasksPath = null;
+               
+               try {
+
+                       //Use the Curator recipe for a Mutex lock, only one process can be broadcasting unlock instructions for a group
+                       InterProcessMutex lock = new InterProcessMutex(curatorFramework, fullLockPath);
+                       if ( lock.acquire(100L, TimeUnit.MILLISECONDS) ) 
+                       {
+                               try 
+                               {
+                                       List<String> taskNodes = curatorFramework.getChildren().forPath(tasksPath);
+                                       for (String taskNodeName : taskNodes) {
+                                               if(!taskNodeName.equals(appId)) {
+                                                       
+                                                       fullTasksPath = String.format("%s/%s/%s", tasksPath, taskNodeName, groupName);
+                                                       log.info("Writing groupName {} to path {}",groupName, fullTasksPath);
+                                                       
+                                                       
+                                                       if(curatorFramework.checkExists().forPath(fullTasksPath) != null) {
+                                                               curatorFramework.delete().forPath(fullTasksPath);
+                                                       }
+                                                       curatorFramework.create().withMode(CreateMode.EPHEMERAL).forPath(fullTasksPath);
+                                               }
+                                       }
+                                       
+
+                               }
+                               finally
+                               {
+                                       //Curator lock recipe requires a acquire() to be followed by a release()
+                                       lock.release();
+                               }
+                       }else {
+                               log.info("Could not obtain the avoider lock, another process has the avoider lock? {}", !lock.isAcquiredInThisProcess() );
+                       }
+
+
+               } catch (Exception e) {
+                       log.error("Error setting up either lock ZNode {} or task  ZNode {}",fullLockPath, fullTasksPath,e);
+                       throw e;
+               }
+               
+               
+       }
+       
+       /*
+        * Shoud be called once per MR server instance.
+        * 
+        */
+       public void startNewWatcherForServer(String appId, LiveLockAvoidance avoidanceCallback) {
+               LockInstructionWatcher instructionWatcher = new LockInstructionWatcher(curatorFramework,avoidanceCallback,this);
+               assignNewProcessNode(appId, instructionWatcher);
+               
+       }
+       
+       
+       protected void assignNewProcessNode(String appId, Watcher processNodeWatcher ) {
+               
+               String taskHolderZnodePath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS+"/"+appId;
+               
+               
+               try {
+                       
+                       if(curatorFramework.checkExists().forPath(taskHolderZnodePath) != null) {
+                               curatorFramework.delete().deletingChildrenIfNeeded().forPath(taskHolderZnodePath);
+
+                       }
+                       curatorFramework.create().forPath(taskHolderZnodePath);
+                       //setup the watcher
+                       curatorFramework.getChildren().usingWatcher(processNodeWatcher).inBackground().forPath(taskHolderZnodePath);
+                       log.info("Done creating task holder and watcher for APP name: {}",appId);
+                       
+               } catch (Exception e) {
+                       log.error("Could not add new processing node for name {}", appId, e);
+               }
+                               
+       }
+
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java
new file mode 100644 (file)
index 0000000..62dc2a5
--- /dev/null
@@ -0,0 +1,204 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.json.JSONException;
+import org.onap.dmaap.dmf.mr.backends.Publisher;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.util.StringUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+
+/**
+ * Sends raw JSON objects into Kafka.
+ * 
+ * Could improve space: BSON rather than JSON?
+ * 
+ * @author peter
+ *
+ */
+
+public class KafkaPublisher implements Publisher {
+       /**
+        * constructor initializing
+        * 
+        * @param settings
+        * @throws rrNvReadable.missingReqdSetting
+        */
+       public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
+
+               final Properties props = new Properties();
+               String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list");
+               if(StringUtils.isEmpty(kafkaConnUrl)){
+                       
+                       kafkaConnUrl="localhost:9092";
+               }
+               
+       
+           if(Utils.isCadiEnabled()){
+               transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
+               transferSetting( props, "security.protocol", "SASL_PLAINTEXT");
+               transferSetting( props, "sasl.mechanism", "PLAIN");     
+           }
+               transferSetting( props, "bootstrap.servers",kafkaConnUrl);
+                       
+               transferSetting( props, "request.required.acks", "1");
+               transferSetting( props, "message.send.max.retries", "5");
+               transferSetting(props, "retry.backoff.ms", "150"); 
+
+               
+               
+               props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+               props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+
+               
+               
+               fProducer = new KafkaProducer<>(props);
+       }
+
+       /**
+        * Send a message with a given topic and key.
+        * 
+        * @param msg
+        * @throws FailedToSendMessageException
+        * @throws JSONException
+        */
+       @Override
+       public void sendMessage(String topic, message msg) throws IOException{
+               final List<message> msgs = new LinkedList<>();
+               msgs.add(msg);
+               sendMessages(topic, msgs);
+       }
+
+       /**  
+        * method publishing batch messages
+       * This method is commented from 0.8 to 0.11 upgrade
+        * @param topic
+        * @param kms
+        * throws IOException
+        *
+       public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+               try {
+                       fProducer.send(kms);
+
+               } catch (FailedToSendMessageException excp) { 
+                       log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+                       throw new FailedToSendMessageException(excp.getMessage(), excp);
+               }
+
+       } */
+
+
+       /*
+        * Kafka 11.0 Interface
+        * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
+        */
+       public void sendBatchMessageNew(String topic, ArrayList <ProducerRecord<String,String>> kms) throws IOException {
+               try {
+                       for (ProducerRecord<String,String> km : kms) {
+                               fProducer.send(km);
+                       }
+
+               } catch (Exception excp) { 
+                       log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+                       throw new IOException(excp.getMessage(), excp);
+               }
+
+       }
+       
+       /**
+        * Send a set of messages. Each must have a "key" string value.
+        * 
+        * @param topic
+        * @param msg
+        * @throws FailedToSendMessageException
+        * @throws JSONException
+        *
+       @Override
+       public void sendMessages(String topic, List<? extends message> msgs)
+                       throws IOException, FailedToSendMessageException {
+               log.info("sending " + msgs.size() + " events to [" + topic + "]");
+
+               final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
+               for (message o : msgs) {
+                       final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
+                       kms.add(data);
+               }
+               try {
+                       fProducer.send(kms);
+
+               } catch (FailedToSendMessageException excp) {
+                       log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+                       throw new FailedToSendMessageException(excp.getMessage(), excp);
+               }
+       } */
+       @Override
+    public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+        log.info("sending " + msgs.size() + " events to [" + topic + "]");
+        try {
+            for (message o : msgs) {
+                final ProducerRecord<String, String> data =
+                        new ProducerRecord<>(topic, o.getKey(), o.toString());
+                fProducer.send(data);
+            }
+        } catch (Exception e) {
+            log.error("Failed to send message(s) to topic [" + topic + "].", e);
+        }
+    }
+
+       
+       private Producer<String, String> fProducer;
+
+  /**
+   * It sets the key value pair
+   * @param topic
+   * @param msg 
+   * @param key
+   * @param defVal
+   */
+       private void transferSetting(Properties props, String key, String defVal) {
+               String kafkaProp= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key);
+               if (StringUtils.isEmpty(kafkaProp)) kafkaProp=defVal;
+               props.put(key, kafkaProp);
+       }
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
+
+       @Override
+       public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+               // TODO Auto-generated method stub
+               
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java
new file mode 100644 (file)
index 0000000..4aa8a97
--- /dev/null
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+
+
+/**
+ * Live Lock Avoidance interface.  To be implemented by the main message router client
+ *
+ */
+public interface LiveLockAvoidance {
+       
+       /**
+        * Gets the unique id
+        * @return the unique id for the Message Router server instance
+        */
+       String getAppId();
+       
+       
+       /**
+        * Main callback to inform the local MR server instance that all consumers in a group need to soft poll
+        * @param groupName name of the Kafka consumer group needed a soft poll
+        */
+       void handleRebalanceUnlock( String groupName);
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java
new file mode 100644 (file)
index 0000000..7dc41bd
--- /dev/null
@@ -0,0 +1,99 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.kafka;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+
+import java.util.List;
+
+/**
+ * 
+ * LockInstructionWatcher
+ * A package-private class used internally by the KafkaLiveLockAvoider.  
+ * 
+ * This class implements the zookeeper Watcher callback and listens for changes on child nodes changing.
+ * Each child node is actually a Kafka group name that needs to be soft polled.  Deletion of the child nodes
+ * after soft poll unlocking is finished.
+ * 
+ *
+ */
+public class LockInstructionWatcher implements Watcher {
+       
+       private CuratorFramework curatorFramework;
+       private LiveLockAvoidance avoidanceCallback;
+       private KafkaLiveLockAvoider2 avoider;
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(LockInstructionWatcher.class.getName());
+       
+
+       public LockInstructionWatcher(CuratorFramework curatorFramework, LiveLockAvoidance avoidanceCallback,
+                       KafkaLiveLockAvoider2 avoider) {
+               super();
+               this.curatorFramework = curatorFramework;
+               this.avoidanceCallback = avoidanceCallback;
+               this.avoider = avoider;
+       }
+
+
+       @Override
+       public void process(WatchedEvent event) {
+               
+               switch (event.getType()) {
+               case NodeChildrenChanged:
+                       
+
+                       try {
+                               
+                               log.info("node children changed at path: {}", event.getPath());
+                               
+                               List<String> children = curatorFramework.getChildren().forPath(event.getPath());
+                               
+                               log.info("found children nodes prodcessing now");
+                               for (String child : children) {
+                                       String childPath = String.format("%s/%s", event.getPath(), child);
+                                       log.info("Processing child task at node {}",childPath);
+                                       avoidanceCallback.handleRebalanceUnlock( child);
+                                       log.info("Deleting child task at node {}",childPath);
+                                       curatorFramework.delete().forPath(childPath);
+                                       } 
+                               //reset the watch with the avoider
+                               avoider.assignNewProcessNode(avoidanceCallback.getAppId(), this);
+                       
+                               
+                       } catch (Exception e) {
+                               log.error("Error manipulating ZNode data in watcher",e);
+                       }
+                       
+                       break;
+
+               default:
+                       log.info("Listner fired on path: {}, with event: {}", event.getPath(), event.getType());
+                       break;
+               }
+       }
+       
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java
new file mode 100644 (file)
index 0000000..f833eed
--- /dev/null
@@ -0,0 +1,185 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.memory;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class MemoryConsumerFactory implements ConsumerFactory
+{
+
+       private final MemoryQueue fQueue;
+       
+       /**
+        * 
+        * Initializing constructor
+        * @param q
+        */
+       public MemoryConsumerFactory ( MemoryQueue q )
+       {
+               fQueue = q;
+       }
+
+       /**
+        * 
+        * @param topic
+        * @param consumerGroupId
+        * @param clientId
+        * @param timeoutMs
+        * @return Consumer
+        */
+       @Override
+       public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs, String remotehost )
+       {
+               return new MemoryConsumer ( topic, consumerGroupId );
+       }
+
+       /**
+        * 
+        * Define nested inner class
+        *
+        */
+       private class MemoryConsumer implements Consumer
+       {
+
+               private final String fTopic;
+               private final String fConsumer;
+               private final long fCreateMs;
+               private long fLastAccessMs;
+               
+               /**
+                * 
+                * Initializing MemoryConsumer constructor 
+                * @param topic
+                * @param consumer
+                * 
+                */
+               public MemoryConsumer ( String topic, String consumer )
+               {
+                       fTopic = topic;
+                       fConsumer = consumer;
+                       fCreateMs = System.currentTimeMillis ();
+                       fLastAccessMs = fCreateMs;
+               }
+
+               @Override
+               /**
+                * 
+                * return consumer details  
+                */
+               public Message nextMessage ()
+               {
+                       return fQueue.get ( fTopic, fConsumer );
+               }
+
+               @Override
+               public boolean close() {
+                       //Nothing to close/clean up.
+                       return true;
+               }
+               /**
+                * 
+                */
+               public void commitOffsets()
+               {
+                       // ignoring this aspect
+               }
+               /**
+                * get offset
+                */
+               public long getOffset()
+               {
+                       return 0;
+               }
+
+               @Override
+               /**
+                * get consumer topic name
+                */
+               public String getName ()
+               {
+                       return fTopic + "/" + fConsumer;
+               }
+
+               @Override
+               public long getCreateTimeMs ()
+               {
+                       return fCreateMs;
+               }
+
+               @Override
+               public long getLastAccessMs ()
+               {
+                       return fLastAccessMs;
+               }
+
+               
+
+               @Override
+               public void setOffset(long offset) {
+                       // TODO Auto-generated method stub
+                       
+               }
+
+               
+       }
+
+       @Override
+       public void destroyConsumer(String topic, String consumerGroupId,
+                       String clientId) {
+               //No cache for memory consumers, so NOOP
+       }
+
+       @Override
+       public void dropCache ()
+       {
+               // nothing to do - there's no cache here
+       }
+
+       @Override
+       /**
+        * @return ArrayList<MemoryConsumer>
+        */
+       public Collection<? extends Consumer> getConsumers ()
+       {
+               return new ArrayList<> ();
+       }
+
+       @Override
+       public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+                       String remotehost) throws UnavailableException, CambriaApiException {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java
new file mode 100644 (file)
index 0000000..96d3eb5
--- /dev/null
@@ -0,0 +1,197 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.memory;
+
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import org.onap.dmaap.dmf.mr.metabroker.Broker;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+
+import java.util.*;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class MemoryMetaBroker implements Broker {
+
+       private final MemoryQueue fQueue;
+       private final HashMap<String, MemTopic> fTopics;
+       
+       /**
+        * 
+        * @param mq
+        * @param configDb
+        * @param settings
+        */
+       public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) {
+       
+               fQueue = mq;
+               fTopics = new HashMap<>();
+       }
+
+       @Override
+       public List<Topic> getAllTopics() {
+               return new LinkedList<Topic>(fTopics.values());
+       }
+
+       @Override
+       public Topic getTopic(String topic) {
+               return fTopics.get(topic);
+       }
+
+       @Override
+       public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas,
+                       boolean transactionEnabled) throws TopicExistsException {
+               if (getTopic(topic) != null) {
+                       throw new TopicExistsException(topic);
+               }
+               fQueue.createTopic(topic);
+               fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled));
+               return getTopic(topic);
+       }
+
+       @Override
+       public void deleteTopic(String topic) {
+               fTopics.remove(topic);
+               fQueue.removeTopic(topic);
+       }
+
+       private static class MemTopic implements Topic {
+
+               private final String fName;
+               private final String fDesc;
+               private final String fOwner;
+               private NsaAcl fReaders;
+               private NsaAcl fWriters;
+               private boolean ftransactionEnabled;
+               private String accessDenied = "User does not own this topic ";
+               
+               /**
+                * constructor initialization
+                * 
+                * @param name
+                * @param desc
+                * @param owner
+                * @param transactionEnabled
+                */
+               public MemTopic(String name, String desc, String owner, boolean transactionEnabled) {
+                       fName = name;
+                       fDesc = desc;
+                       fOwner = owner;
+                       ftransactionEnabled = transactionEnabled;
+                       fReaders = null;
+                       fWriters = null;
+               }
+
+               @Override
+               public String getOwner() {
+                       return fOwner;
+               }
+
+               @Override
+               public NsaAcl getReaderAcl() {
+                       return fReaders;
+               }
+
+               @Override
+               public NsaAcl getWriterAcl() {
+                       return fWriters;
+               }
+
+               @Override
+               public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
+                       if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) {
+                               throw new AccessDeniedException(user == null ? "" : user.getKey());
+                       }
+               }
+
+               @Override
+               public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
+                       if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) {
+                               throw new AccessDeniedException(user == null ? "" : user.getKey());
+                       }
+               }
+
+               @Override
+               public String getName() {
+                       return fName;
+               }
+
+               @Override
+               public String getDescription() {
+                       return fDesc;
+               }
+
+               @Override
+               public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
+                       if (!fOwner.equals(asUser.getKey())) {
+                               throw new AccessDeniedException(accessDenied + fName);
+                       }
+                       if (fWriters == null) {
+                               fWriters = new NsaAcl();
+                       }
+                       fWriters.add(publisherId);
+               }
+
+               @Override
+               public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
+                       if (!fOwner.equals(asUser.getKey())) {
+                               throw new AccessDeniedException(accessDenied + fName);
+                       }
+                       fWriters.remove(publisherId);
+               }
+
+               @Override
+               public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
+                       if (!fOwner.equals(asUser.getKey())) {
+                               throw new AccessDeniedException(accessDenied + fName);
+                       }
+                       if (fReaders == null) {
+                               fReaders = new NsaAcl();
+                       }
+                       fReaders.add(consumerId);
+               }
+
+               @Override
+               public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
+                       if (!fOwner.equals(asUser.getKey())) {
+                               throw new AccessDeniedException(accessDenied + fName);
+                       }
+                       fReaders.remove(consumerId);
+               }
+
+               @Override
+               public boolean isTransactionEnabled() {
+                       return ftransactionEnabled;
+               }
+
+               @Override
+               public Set<String> getOwners() {
+                       final TreeSet<String> set = new TreeSet<> ();
+                       set.add ( fOwner );
+                       return set;
+               }
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java
new file mode 100644 (file)
index 0000000..ef8b128
--- /dev/null
@@ -0,0 +1,207 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.memory;
+
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/**
+ * When broker type is memory, then this class is doing all the topic related
+ * operations
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class MemoryQueue {
+       // map from topic to list of msgs
+       private HashMap<String, LogBuffer> fQueue;
+       private HashMap<String, HashMap<String, Integer>> fOffsets;
+
+       /**
+        * constructor storing hashMap objects in Queue and Offsets object
+        */
+       public MemoryQueue() {
+               fQueue = new HashMap<>();
+               fOffsets = new HashMap<>();
+       }
+
+       /**
+        * method used to create topic
+        * 
+        * @param topic
+        */
+       public synchronized void createTopic(String topic) {
+               LogBuffer q = fQueue.get(topic);
+               if (q == null) {
+                       q = new LogBuffer(1024 * 1024);
+                       fQueue.put(topic, q);
+               }
+       }
+
+       /**
+        * method used to remove topic
+        * 
+        * @param topic
+        */
+       public synchronized void removeTopic(String topic) {
+               LogBuffer q = fQueue.get(topic);
+               if (q != null) {
+                       fQueue.remove(topic);
+               }
+       }
+
+       /**
+        * method to write message on topic
+        * 
+        * @param topic
+        * @param m
+        */
+       public synchronized void put(String topic, message m) {
+               LogBuffer q = fQueue.get(topic);
+               if (q == null) {
+                       createTopic(topic);
+                       q = fQueue.get(topic);
+               }
+               q.push(m.getMessage());
+       }
+
+       /**
+        * method to read consumer messages
+        * 
+        * @param topic
+        * @param consumerName
+        * @return
+        */
+       public synchronized Consumer.Message get(String topic, String consumerName) {
+               final LogBuffer q = fQueue.get(topic);
+               if (q == null) {
+                       return null;
+               }
+
+               HashMap<String, Integer> offsetMap = fOffsets.get(consumerName);
+               if (offsetMap == null) {
+                       offsetMap = new HashMap<>();
+                       fOffsets.put(consumerName, offsetMap);
+               }
+               Integer offset = offsetMap.get(topic);
+               if (offset == null) {
+                       offset = 0;
+               }
+
+               final msgInfo result = q.read(offset);
+               if (result != null && result.msg != null) {
+                       offsetMap.put(topic, result.offset + 1);
+               }
+               return result;
+       }
+
+       /**
+        * static inner class used to details about consumed messages
+        * 
+        * @author anowarul.islam
+        *
+        */
+       private static class msgInfo implements Consumer.Message {
+               /**
+                * published message which is consumed
+                */
+               public String msg;
+               /**
+                * offset associated with message
+                */
+               public int offset;
+
+               /**
+                * get offset of messages
+                */
+               @Override
+               public long getOffset() {
+                       return offset;
+               }
+
+               /**
+                * get consumed message
+                */
+               @Override
+               public String getMessage() {
+                       return msg;
+               }
+       }
+
+ /**
+ * 
+ * @author sneha.d.desai
+ *
+ * private LogBuffer class has synchronized push and read method
+ */
+       private class LogBuffer {
+               private int fBaseOffset;
+               private final int fMaxSize;
+               private final ArrayList<String> fList;
+
+               /**
+                * constructor initializing the offset, maxsize and list
+                * 
+                * @param maxSize
+                */
+               public LogBuffer(int maxSize) {
+                       fBaseOffset = 0;
+                       fMaxSize = maxSize;
+                       fList = new ArrayList<>();
+               }
+
+               /**
+                * pushing message
+                * 
+                * @param msg
+                */
+               public synchronized void push(String msg) {
+                       fList.add(msg);
+                       while (fList.size() > fMaxSize) {
+                               fList.remove(0);
+                               fBaseOffset++;
+                       }
+               }
+
+               /**
+                * reading messages
+                * 
+                * @param offset
+                * @return
+                */
+               public synchronized msgInfo read(int offset) {
+                       final int actual = Math.max(0, offset - fBaseOffset);
+
+                       final msgInfo mi = new msgInfo();
+                       mi.msg = (actual >= fList.size()) ? null : fList.get(actual);
+                       if (mi.msg == null)
+                               return null;
+
+                       mi.offset = actual + fBaseOffset;
+                       return mi;
+               }
+
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java
new file mode 100644 (file)
index 0000000..bb7ad02
--- /dev/null
@@ -0,0 +1,90 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.memory;
+
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.onap.dmaap.dmf.mr.backends.Publisher;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class MemoryQueuePublisher implements Publisher {
+       /**
+        * 
+        * @param q
+        * @param b
+        */
+       public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) {
+               fBroker = b;
+               fQueue = q;
+       }
+
+       
+       /**
+        * 
+        * @param topic
+        * @param msg
+        * @throws IOException
+        */
+       @Override
+       public void sendMessage(String topic, message msg) throws IOException {
+               if (null == fBroker.getTopic(topic)) {
+                       try {
+                               fBroker.createTopic(topic, topic, null, 8, 3, false);
+                       } catch (TopicExistsException e) {
+                               throw new RuntimeException(e);
+                       }
+               }
+               fQueue.put(topic, msg);
+       }
+
+       @Override
+       /**
+        * @param topic
+        * @param msgs
+        * @throws IOException
+        */
+
+       public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
+
+       }
+
+       public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+       }
+
+       public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+               for (message m : msgs) {
+                       sendMessage(topic, m);
+               }
+       }
+
+       private final MemoryMetaBroker fBroker;
+       private final MemoryQueue fQueue;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java
new file mode 100644 (file)
index 0000000..170c1f3
--- /dev/null
@@ -0,0 +1,107 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.backends.memory;
+
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.onap.dmaap.dmf.mr.backends.Publisher;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+
+/**
+ * class used for logging perspective
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class MessageLogger implements Publisher {
+       public MessageLogger() {
+       }
+
+       public void setFile(File f) throws FileNotFoundException {
+               fStream = new FileOutputStream(f, true);
+       }
+
+       /** 
+        * 
+        * @param topic
+        * @param msg
+        * @throws IOException
+        */
+       @Override
+       public void sendMessage(String topic, message msg) throws IOException {
+               logMsg(msg);
+       }
+
+       /**
+        * @param topic
+        * @param msgs
+        * @throws IOException
+        */
+       @Override
+       public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+               for (message m : msgs) {
+                       logMsg(m);
+               }
+       }
+
+       /**
+        * @param topic
+        * @param kms
+        * @throws IOException
+       
+       @Override
+       public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
+
+       IOException {
+       }
+ */
+       private FileOutputStream fStream;
+
+       /**
+        * 
+        * @param msg
+        * @throws IOException
+        */
+       private void logMsg(message msg) throws IOException {
+               String key = msg.getKey();
+               if (key == null)
+                       key = "<none>";
+
+               fStream.write('[');
+               fStream.write(key.getBytes());
+               fStream.write("] ".getBytes());
+               fStream.write(msg.getMessage().getBytes());
+               fStream.write('\n');
+       }
+       public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
+
+       }
+
+       public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java
new file mode 100644 (file)
index 0000000..dd0083f
--- /dev/null
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
+
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+@XmlRootElement
+public class ApiKeyBean implements Serializable {
+
+       private static final long serialVersionUID = -8219849086890567740L;
+       
+       private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+       
+       private String email;
+       private String description;
+       /**
+        * constructor
+        */
+       public ApiKeyBean() {
+               super();
+       }
+/**
+ * 
+ * @param email
+ * @param description
+ */
+       public ApiKeyBean(String email, String description) {
+               super();
+               this.email = email;
+               this.description = description;
+       }
+
+       public String getEmail() {
+               return email;
+       }
+
+       public void setEmail(String email) {
+               this.email = email;
+       }
+
+       public String getDescription() {
+               return description;
+       }
+
+       public void setDescription(String description) {
+               this.description = description;
+       }
+
+       public String getKey() {
+               return generateKey(16);
+       }
+
+       public String getSharedSecret() {
+               return generateKey(24);
+       }
+       
+       private static String generateKey ( int length  ) {
+               return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length );
+       }
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java
new file mode 100644 (file)
index 0000000..dd479b9
--- /dev/null
@@ -0,0 +1,247 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.metrics.impl.CdmRateTicker;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import java.util.Date;
+import java.util.HashMap;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * class provide rate information
+ * 
+ * @author anowarul.islam
+ *
+ */
+@Component
+public class DMaaPCambriaLimiter {
+       private final HashMap<String, RateInfo> fRateInfo;
+       private final double fMaxEmptyPollsPerMinute;
+       private final double fMaxPollsPerMinute;
+       private final int fWindowLengthMins;
+       private final long fSleepMs;
+       private final long fSleepMs1;
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
+       
+       /**
+        * constructor initializes
+        * 
+        * @param settings
+        * @throws missingReqdSetting
+        * @throws invalidSettingValue
+        */
+       @Autowired
+       public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings) {
+                       fRateInfo = new HashMap<>();
+               fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+                               CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
+               fMaxPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxPollsPerMinute,
+                               30);
+               fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
+                               CambriaConstants.kDefault_RateLimitWindowLength);
+               fSleepMs = settings.getLong(CambriaConstants.kSetting_SleepMsOnRateLimit,
+                               CambriaConstants.kDefault_SleepMsOnRateLimit);
+               fSleepMs1 = settings.getLong(CambriaConstants.kSetting_SleepMsRealOnRateLimit,
+                               5000);
+               
+       }
+       
+       /**
+        * Construct a rate limiter.
+        * 
+        * @param maxEmptyPollsPerMinute
+        *            Pass <= 0 to deactivate rate limiting.
+        *            @param windowLengthMins
+        */
+       public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, double maxPollsPerMinute,int windowLengthMins) {
+               this(maxEmptyPollsPerMinute,maxPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute),getSleepMsForRate(1));
+       }
+
+       /**
+        * Construct a rate limiter
+        * 
+        * @param maxEmptyPollsPerMinute
+        *            Pass <= 0 to deactivate rate limiting.
+        * @param sleepMs
+        * @param windowLengthMins
+        */
+       public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute,double maxPollsPerMinute, int windowLengthMins, long sleepMs ,long sleepMS1) {
+               fRateInfo = new HashMap<>();
+               fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
+               fMaxPollsPerMinute = Math.max(0, maxPollsPerMinute);
+               fWindowLengthMins = windowLengthMins;
+               fSleepMs = Math.max(0, sleepMs);
+               fSleepMs1 = Math.max(0, sleepMS1);
+       }
+
+       /**
+        * static method provide the sleep time
+        * 
+        * @param ratePerMinute
+        * @return
+        */
+       public static long getSleepMsForRate(double ratePerMinute) {
+               if (ratePerMinute <= 0.0)
+                       return 0;
+               return Math.max(1000, Math.round(60 * 1000 / ratePerMinute));
+       }
+
+       /**
+        * Tell the rate limiter about a call to a topic/group/id. If the rate is
+        * too high, this call delays its return and throws an exception.
+        * 
+        * @param topic
+        * @param consumerGroup
+        * @param clientId
+        * @throws CambriaApiException
+        */
+       public void onCall(String topic, String consumerGroup, String clientId,String remoteHost) throws CambriaApiException {
+               // do nothing if rate is configured 0 or less
+               if (fMaxEmptyPollsPerMinute <= 0) {
+                       return;
+               }
+                               // setup rate info for this tuple
+               final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
+               final double rate = ri.onCall();
+               log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
+               if (rate > fMaxEmptyPollsPerMinute) {
+                       try {
+                               log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxPollsPerMinute
+                                               + ".");
+                               if (fSleepMs > 0) {
+                                       log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+                                                       + " ms sleep, then responding in error.");
+                                       Thread.sleep(fSleepMs);
+                                       
+                               } else {
+                                       log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
+                               }
+                       } catch (InterruptedException e) {
+                               log.error("Exception "+ e);
+                               Thread.currentThread().interrupt();
+                       }
+                       
+                       
+                       ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests, 
+                                       DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), 
+                                       "This client is making too many requests. Please use a long poll "
+                                                       + "setting to decrease the number of requests that result in empty responses. ","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost);
+                       
+                       log.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+               
+               
+       }
+
+       /**
+        * 
+        * @param topic
+        * @param consumerGroup
+        * @param clientId
+        * @param sentCount
+        */
+       public void onSend(String topic, String consumerGroup, String clientId, long sentCount) {
+               // check for good replies
+               if (sentCount > 0) {
+                       // that was a good send, reset the metric
+                       getRateInfo(topic, consumerGroup, clientId).reset();
+               }
+       }
+
+       private static class RateInfo {
+               private final String fLabel;
+               private final CdmRateTicker fCallRateSinceLastMsgSend;
+               /**
+                * constructor initialzes
+                * 
+                * @param label
+                * @param windowLengthMinutes
+                */
+               public RateInfo(String label, int windowLengthMinutes) {
+                       fLabel = label;
+                       fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
+                                       windowLengthMinutes, TimeUnit.MINUTES);
+               }
+               
+               public String getLabel() {
+                       return fLabel;
+               }
+
+               /**
+                * CdmRateTicker is reset
+                */
+               public void reset() {
+                       fCallRateSinceLastMsgSend.reset();
+               }
+
+               /**
+                * 
+                * @return
+                */
+               public double onCall() {
+                       fCallRateSinceLastMsgSend.tick();
+                       return fCallRateSinceLastMsgSend.getRate();
+               }
+       }
+       
+       
+       
+       
+       
+       
+       
+       private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
+               final String key = makeKey(topic, consumerGroup, clientId);
+               RateInfo ri = fRateInfo.get(key);
+               if (ri == null) {
+                       ri = new RateInfo(key, fWindowLengthMins);
+                       fRateInfo.put(key, ri);
+               }
+               return ri;
+       }
+       
+       
+       
+
+       
+       
+       
+       private String makeKey(String topic, String group, String id) {
+               return topic + "::" + group + "::" + id;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java
new file mode 100644 (file)
index 0000000..1b471a2
--- /dev/null
@@ -0,0 +1,103 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * DMaaPContext provide and maintain all the configurations , Http request/response
+ * Session and consumer Request Time
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPContext {
+
+    private ConfigurationReader configReader;
+    private HttpServletRequest request;
+    private HttpServletResponse response;
+    private HttpSession session;
+    private String consumerRequestTime;
+    static int i=0;
+    
+    public synchronized static long getBatchID() {
+       try{
+               final long metricsSendTime = System.currentTimeMillis();
+               final Date d = new Date(metricsSendTime);
+               final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d);
+               long dt= Long.valueOf(text)+i;
+               i++;
+               return dt;
+       }
+       catch(NumberFormatException ex){
+               return 0;
+       }
+    }
+    
+    public HttpServletRequest getRequest() {
+        return request;
+    }
+
+    public void setRequest(HttpServletRequest request) {
+        this.request = request;
+    }
+
+    public HttpServletResponse getResponse() {
+        return response;
+    }
+
+    public void setResponse(HttpServletResponse response) {
+        this.response = response;
+    }
+
+    public HttpSession getSession() {
+        this.session = request.getSession();
+        return session;
+    }
+
+    public void setSession(HttpSession session) {
+        this.session = session;
+    }
+
+    public ConfigurationReader getConfigReader() {
+         return configReader;
+    }
+
+    public void setConfigReader(ConfigurationReader configReader) {
+        this.configReader = configReader;
+    }
+
+    public String getConsumerRequestTime() {
+        return consumerRequestTime;
+    }
+
+    public void setConsumerRequestTime(String consumerRequestTime) {
+        this.consumerRequestTime = consumerRequestTime;
+    }
+    
+    
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
new file mode 100644 (file)
index 0000000..26a8cf4
--- /dev/null
@@ -0,0 +1,356 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.backends.kafka.*;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
+
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
+       
+
+       /**
+        * constructor initialization
+        * 
+        * @param settings
+        * @param metrics
+        * @param curator
+        * @throws missingReqdSetting
+        * @throws KafkaConsumerCacheException
+        * @throws UnknownHostException
+        */
+
+       public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
+                       @Qualifier("curator") CuratorFramework curator,
+                       @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider)
+                       throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException {
+
+               String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               CambriaConstants.kSetting_ApiNodeIdentifier);
+               if (apiNodeId == null) {
+
+                       apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
+               }
+
+               log.info("This Cambria API Node identifies itself as [" + apiNodeId + "].");
+               final String mode = CambriaConstants.DMAAP;
+
+               fkafkaBrokers = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "kafka.metadata.broker.list");
+               if (null == fkafkaBrokers) {
+
+                       fkafkaBrokers = "localhost:9092";
+               }
+
+               boolean kSetting_EnableCache = kDefault_IsCacheEnabled;
+               String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "cambria.consumer.cache.enabled");
+               if (null != strkSetting_EnableCache)
+                       kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+
+               final boolean isCacheEnabled = kSetting_EnableCache;
+
+               
+               fCache = null;
+               if (isCacheEnabled) {
+                       fCache = KafkaConsumerCache.getInstance();
+
+               }
+               if (fCache != null) {
+                       fCache.setfMetrics(metrics);
+                       fCache.setfApiId(apiNodeId);
+                       fCache.startCache(mode, curator);
+                       if(kafkaLiveLockAvoider!=null){
+                       kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId));
+                       fkafkaLiveLockAvoider = kafkaLiveLockAvoider;
+                       }
+               }
+       }
+
+       /*
+        * getConsumerFor
+        * 
+        * @see
+        * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String,
+        * java.lang.String, java.lang.String, int, java.lang.String) This method is
+        * used by EventServiceImpl.getEvents() method to get a Kakfa consumer
+        * either from kafkaconsumer cache or create a new connection This also get
+        * the list of other consumer objects for the same consumer group and set to
+        * KafkaConsumer object. This list may be used during poll-rebalancing
+        * issue.
+        */
+       @Override
+       public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+                       String remotehost) throws UnavailableException, CambriaApiException {
+               Kafka011Consumer kc;
+
+               // To synchronize based on the consumer group.
+
+               Object syncObject = synchash.get(topic + consumerGroupName);
+               if (null == syncObject) {
+                       syncObject = new Object();
+                       synchash.put(topic + consumerGroupName, syncObject);
+               }
+
+               synchronized (syncObject) {
+                       try {
+                               kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null; // consumerId
+
+                       } catch (KafkaConsumerCacheException e) {
+                               log.info("######@@@@### Error occured in Kafka Caching" + e + "  " + topic + "::" + consumerGroupName
+                                               + "::" + consumerId);
+                               log.error("####@@@@## Error occured in Kafka Caching" + e + "  " + topic + "::" + consumerGroupName
+                                               + "::" + consumerId);
+                               throw new UnavailableException(e);
+                       }
+
+                       // Ideally if cache exists below flow should be skipped. If cache
+                       // didnt
+                       // exist, then create this first time on this node.
+                       if (kc == null) {
+
+                               log.info("^Kafka consumer cache value " + topic + "::" + consumerGroupName + "::" + consumerId + " =>"
+                                               + kc);
+
+                               final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(),
+                                               "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
+                               boolean locked = false;
+
+                               try {
+
+                                       locked = ipLock.acquire(30, TimeUnit.SECONDS);
+                                       if (!locked) {
+
+                                               log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
+                                                               + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost);
+                                               throw new UnavailableException(
+                                                               "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
+                                                                               + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost);
+                                       }
+
+                                       // ConfigurationReader.getCurator().checkExists().forPath("S").
+
+                                       log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId
+                                                       + "], on topic [" + topic + "].");
+                                       
+                                       if (fCache != null) {
+                                               fCache.signalOwnership(topic, consumerGroupName, consumerId);
+                                       }
+                                       
+                                       final Properties props = createConsumerConfig(topic,consumerGroupName, consumerId);
+                                       long fCreateTimeMs = System.currentTimeMillis();
+                                       KafkaConsumer<String, String> cc = new KafkaConsumer<>(props);
+                                       kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider);
+                                       log.info(" kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+
+                                       if (fCache != null) {
+                                               fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc); //
+                                       }
+
+                               } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
+                                       log.info(
+                                                       "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId);
+                                       throw new UnavailableException("Couldn't connect to ZK.");
+                               } catch (KafkaConsumerCacheException e) {
+                                       log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage()
+                                                       + " " + consumerGroupName + "/" + consumerId);
+                               } catch (UnavailableException u) {
+                                       log.info("Failed and in UnavailableException block " + u.getMessage() + " " + consumerGroupName
+                                                       + "/" + consumerId);
+                                       throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u);
+                               } catch (Exception e) {
+                                       log.info("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
+                                                       + consumerId);
+                                       log.error("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
+                                                       + consumerId);
+                                       
+                               } finally {
+                                       if (locked) {
+                                               try {
+                                                       ipLock.release();
+                                               } catch (Exception e) {
+                                                       log.error("Error while releasing consumer factory lock", e);
+                                               }
+                                       }
+                               }
+                       }
+               }
+               return kc;
+       }
+
+       @Override
+       public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) {
+               if (fCache != null) {
+                       fCache.dropConsumer(topic, consumerGroup, clientId);
+               }
+       }
+
+       @Override
+       public synchronized Collection<? extends Consumer> getConsumers() {
+               return fCache.getConsumers();
+       }
+
+       @Override
+       public synchronized void dropCache() {
+               fCache.dropAllConsumers();
+       }
+
+       
+       private KafkaConsumerCache fCache;
+       private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider;
+       private String fkafkaBrokers;
+
+
+
+       private static String makeLongKey(String key, String prefix) {
+               return prefix + "." + key;
+       }
+
+       private void transferSettingIfProvided(Properties target, String key, String prefix) {
+               String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix));
+
+               
+               if (null != keyVal) {
+               
+                       log.info("Setting [" + key + "] to " + keyVal + ".");
+                       target.put(key, keyVal);
+               }
+       }
+
+       /**
+        * Name CreateConsumerconfig  
+        * @param topic
+        * @param groupId
+        * @param consumerId
+        * @return Properties
+        * 
+        * This method is to create Properties required to create kafka connection
+        * Group name is replaced with different format groupid--topic to address same 
+        * groupids for multiple topics. Same groupid with multiple topics 
+        * may start frequent consumer rebalancing on all the topics . Replacing them makes it unique
+        */
+       private Properties createConsumerConfig(String topic ,String groupId, String consumerId) {
+               final Properties props = new Properties();
+               //fakeGroupName is added to avoid multiple consumer group for multiple topics.Donot Change this logic
+               //Fix for CPFMF-644 : 
+               final String fakeGroupName = groupId + "--" + topic;
+               props.put("group.id", fakeGroupName);
+               props.put("enable.auto.commit", "false"); // 0.11
+               props.put("bootstrap.servers", fkafkaBrokers);
+               if(Utils.isCadiEnabled()){
+               props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
+               props.put("security.protocol", "SASL_PLAINTEXT");
+               props.put("sasl.mechanism", "PLAIN");
+               }
+               props.put("client.id", consumerId);
+
+               // additional settings: start with our defaults, then pull in configured
+               // overrides
+               populateKafkaInternalDefaultsMap();
+               for (String key : KafkaConsumerKeys) {
+                       transferSettingIfProvided(props, key, "kafka");
+               }
+
+               props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+               props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+
+               return props;
+       }
+
+
+       private static final String KafkaConsumerKeys[] = { "bootstrap.servers", "heartbeat.interval.ms",
+                       "auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes",
+                       "auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level",
+                       "fetch.max.bytes", "request.timeout.ms", "fetch.max.wait.bytes", "reconnect.backoff.max.ms",
+                       "max.partition.fetch.bytes", "reconnect.backoff.max.ms", "reconnect.backoff.ms", "retry.backoff.ms",
+                       "max.poll.interval.ms", "max.poll.records", "receive.buffer.bytes", "metadata.max.age.ms" };
+
+       /**
+        * putting values in hashmap like consumer timeout, zookeeper time out, etc
+        * 
+        * @param setting
+        */
+       private static void populateKafkaInternalDefaultsMap() { }
+
+       /*
+        * The starterIncremnt value is just to emulate calling certain consumers,
+        * in this test app all the consumers are local
+        * 
+        */
+       private LiveLockAvoidance makeAvoidanceCallback(final String appId) {
+
+               return new LiveLockAvoidance() {
+
+                       @Override
+                       public String getAppId() {
+                               return appId;
+                       }
+
+                       @Override
+                       public void handleRebalanceUnlock(String groupName) {
+                               log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName);
+                               Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::");
+                       }
+
+               };
+
+       }
+
+       @SuppressWarnings("rawtypes")
+       @Override
+       public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+                       String remotehost) throws UnavailableException, CambriaApiException {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       private HashMap<String, Object> synchash = new HashMap<String, Object>();
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java
new file mode 100644 (file)
index 0000000..9ab4c83
--- /dev/null
@@ -0,0 +1,474 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaAclUtils;
+import com.att.nsa.security.NsaApiKey;
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.exception.ZkNoNodeException;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.AdminClientConfig;
+import org.apache.kafka.clients.admin.CreateTopicsResult;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.common.KafkaFuture;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.metabroker.Broker1;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.util.StringUtils;
+
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+
+
+/**
+ * class performing all topic operations
+ * 
+ * @author anowarul.islam
+ *
+ */
+//@Component
+public class DMaaPKafkaMetaBroker implements Broker1 {
+
+       public DMaaPKafkaMetaBroker() {
+               fZk = null;
+               fCambriaConfig = null;
+               fBaseTopicData = null;
+               final Properties props = new Properties ();
+               String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "kafka.metadata.broker.list");
+               if (StringUtils.isEmpty(fkafkaBrokers)) {
+
+                       fkafkaBrokers = "localhost:9092";
+               }
+               
+            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
+            if(Utils.isCadiEnabled()){
+            props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
+                props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");            
+            props.put("sasl.mechanism", "PLAIN");
+            }
+          
+            fKafkaAdminClient=AdminClient.create ( props );
+           
+       }
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+       private final AdminClient fKafkaAdminClient;
+       
+       
+
+       /**
+        * DMaaPKafkaMetaBroker constructor initializing
+        * 
+        * @param settings
+        * @param zk
+        * @param configDb
+        */
+       public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings,
+                       @Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) {
+               fZk = zk;
+               fCambriaConfig = configDb;
+               fBaseTopicData = configDb.parse("/topics");
+               final Properties props = new Properties ();
+               String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "kafka.metadata.broker.list");
+               if (null == fkafkaBrokers) {
+
+                       fkafkaBrokers = "localhost:9092";
+               }
+               
+                if(Utils.isCadiEnabled()){
+                props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
+                props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");            
+            props.put("sasl.mechanism", "PLAIN");
+                }
+            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
+            
+            fKafkaAdminClient=AdminClient.create ( props );
+           
+               
+               
+       }
+       
+       public DMaaPKafkaMetaBroker( rrNvReadable settings,
+                       ZkClient zk,  ConfigDb configDb,AdminClient client) {
+               
+               fZk = zk;
+               fCambriaConfig = configDb;
+               fBaseTopicData = configDb.parse("/topics");
+           fKafkaAdminClient= client;
+          
+               
+               
+       }
+
+       @Override
+       public List<Topic> getAllTopics() throws ConfigDbException {
+               log.info("Retrieving list of all the topics.");
+               final LinkedList<Topic> result = new LinkedList<>();
+               try {
+                       log.info("Retrieving all topics from root: " + zkTopicsRoot);
+                       final List<String> topics = fZk.getChildren(zkTopicsRoot);
+                       for (String topic : topics) {
+                               result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
+                       }
+                       JSONObject dataObj = new JSONObject();
+                       dataObj.put("topics", new JSONObject());
+
+                       for (String topic : topics) {
+                               dataObj.getJSONObject("topics").put(topic, new JSONObject());
+                       }
+               } catch (ZkNoNodeException excp) {
+                       // very fresh kafka doesn't have any topics or a topics node
+                       log.error("ZK doesn't have a Kakfa topics node at " + zkTopicsRoot, excp);
+               }
+               return result;
+       }
+
+       @Override
+       public Topic getTopic(String topic) throws ConfigDbException {
+               if (fZk.exists(zkTopicsRoot + "/" + topic)) {
+                       return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic);
+               }
+               // else: no such topic in kafka
+               return null;
+       }
+
+       /**
+        * static method get KafkaTopic object
+        * 
+        * @param db
+        * @param base
+        * @param topic
+        * @return
+        * @throws ConfigDbException
+        */
+       public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException {
+               return new KafkaTopic(topic, db, base);
+       }
+
+       /**
+        * creating topic
+        */
+       @Override
+       public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException {
+               log.info("Creating topic: " + topic);
+               try {
+                       log.info("Check if topic [" + topic + "] exist.");
+                       // first check for existence "our way"
+                       final Topic t = getTopic(topic);
+                       if (t != null) {
+                               log.info("Could not create topic [" + topic + "]. Topic Already exists.");
+                               throw new TopicExistsException("Could not create topic [" + topic + "]. Topic Alreay exists.");
+                       }
+               } catch (ConfigDbException e1) {
+                       log.error("Topic [" + topic + "] could not be created. Couldn't check topic data in config db.", e1);
+                       throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+                                       "Couldn't check topic data in config db.");
+               }
+
+               // we only allow 3 replicas. (If we don't test this, we get weird
+               // results from the cluster,
+               // so explicit test and fail.)
+               if (replicas < 1 || replicas > 3) {
+                       log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3.");
+                       throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
+                                       "The replica count must be between 1 and 3.");
+               }
+               if (partitions < 1) {
+                       log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1.");
+                       throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1.");
+               }
+
+               // create via kafka
+
+        try {
+            final NewTopic topicRequest =
+                    new NewTopic(topic, partitions, (short)replicas);
+            final CreateTopicsResult ctr =
+                    fKafkaAdminClient.createTopics(Arrays.asList(topicRequest));
+            final KafkaFuture<Void> ctrResult = ctr.all();
+            ctrResult.get();
+            // underlying Kafka topic created. now setup our API info
+            return createTopicEntry(topic, desc, ownerApiKey, transactionEnabled);
+        } catch (InterruptedException e) {
+            log.warn("Execution of describeTopics timed out.");
+            throw new ConfigDbException(e);
+        } catch (ExecutionException e) {
+            log.warn("Execution of describeTopics failed: " + e.getCause().getMessage(), e);
+            throw new ConfigDbException(e.getCause());
+        }
+               
+       }
+
+       @Override
+       public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException,ConfigDbException {
+               log.info("Deleting topic: " + topic);
+               try {
+                       log.info("Loading zookeeper client for topic deletion.");
+                                       // topic creation. (Otherwise, the topic is only partially created
+                       // in ZK.)
+                       
+                       
+                       fKafkaAdminClient.deleteTopics(Arrays.asList(topic));
+                       log.info("Zookeeper client loaded successfully. Deleting topic.");
+                       
+               } catch (Exception e) {
+                       log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
+                       throw new ConfigDbException(e);
+               }  finally {
+                       log.info("Closing zookeeper connection.");
+               }
+       }
+
+       private final ZkClient fZk;
+       private final ConfigDb fCambriaConfig;
+       private final ConfigPath fBaseTopicData;
+
+       private static final String zkTopicsRoot = "/brokers/topics";
+       private static final JSONObject kEmptyAcl = new JSONObject();
+
+       /**
+        * method Providing KafkaTopic Object associated with owner and
+        * transactionenabled or not
+        * 
+        * @param name
+        * @param desc
+        * @param owner
+        * @param transactionEnabled
+        * @return
+        * @throws ConfigDbException
+        */
+       public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled)
+                       throws ConfigDbException {
+               return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled);
+       }
+
+       /**
+        * static method giving kafka topic object
+        * 
+        * @param db
+        * @param basePath
+        * @param name
+        * @param desc
+        * @param owner
+        * @param transactionEnabled
+        * @return
+        * @throws ConfigDbException
+        */
+       public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner,
+                       boolean transactionEnabled) throws ConfigDbException {
+               final JSONObject o = new JSONObject();
+               o.put("owner", owner);
+               o.put("description", desc);
+               o.put("txenabled", transactionEnabled);
+               db.store(basePath.getChild(name), o.toString());
+               return new KafkaTopic(name, db, basePath);
+       }
+
+       /**
+        * class performing all user opearation like user is eligible to read,
+        * write. permitting a user to write and read,
+        * 
+        * @author anowarul.islam
+        *
+        */
+       public static class KafkaTopic implements Topic {
+               /**
+                * constructor initializes
+                * 
+                * @param name
+                * @param configdb
+                * @param baseTopic
+                * @throws ConfigDbException
+                */
+               public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException {
+                       fName = name;
+                       fConfigDb = configdb;
+                       fBaseTopicData = baseTopic;
+
+                       String data = fConfigDb.load(fBaseTopicData.getChild(fName));
+                       if (data == null) {
+                               data = "{}";
+                       }
+
+                       final JSONObject o = new JSONObject(data);
+                       fOwner = o.optString("owner", "");
+                       fDesc = o.optString("description", "");
+                       fTransactionEnabled = o.optBoolean("txenabled", false);// default
+                                                                                                                                       // value is
+                                                                                                                                       // false
+                       // if this topic has an owner, it needs both read/write ACLs. If there's no
+                                               // owner (or it's empty), null is okay -- this is for existing or implicitly
+                                               // created topics.
+                                               JSONObject readers = o.optJSONObject ( "readers" );
+                                               if ( readers == null && fOwner.length () > 0 )
+                                               {
+                                                   readers = kEmptyAcl;
+                                               }
+                                               fReaders =  fromJson ( readers );
+
+                                               JSONObject writers = o.optJSONObject ( "writers" );
+                                               if ( writers == null && fOwner.length () > 0 )
+                                               {
+                                                   writers = kEmptyAcl;
+                                               }
+                                               fWriters = fromJson ( writers );
+               }
+               
+               private NsaAcl fromJson(JSONObject o) {
+                       NsaAcl acl = new NsaAcl();
+                       if (o != null) {
+                               JSONArray a = o.optJSONArray("allowed");
+                               if (a != null) {
+                                       for (int i = 0; i < a.length(); ++i) {
+                                               String user = a.getString(i);
+                                               acl.add(user);
+                                       }
+                               }
+                       }
+                       return acl;
+               }
+
+               @Override
+               public String getName() {
+                       return fName;
+               }
+
+               @Override
+               public String getOwner() {
+                       return fOwner;
+               }
+
+               @Override
+               public String getDescription() {
+                       return fDesc;
+               }
+
+               @Override
+               public NsaAcl getReaderAcl() {
+                       return fReaders;
+               }
+
+               @Override
+               public NsaAcl getWriterAcl() {
+                       return fWriters;
+               }
+
+               @Override
+               public void checkUserRead(NsaApiKey user) throws AccessDeniedException  {
+                       NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user );
+               }
+
+               @Override
+               public void checkUserWrite(NsaApiKey user) throws AccessDeniedException  {
+                       NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user );
+               }
+
+               @Override
+               public void permitWritesFromUser(String pubId, NsaApiKey asUser)
+                               throws ConfigDbException, AccessDeniedException {
+                       updateAcl(asUser, false, true, pubId);
+               }
+
+               @Override
+               public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException {
+                       updateAcl(asUser, false, false, pubId);
+               }
+
+               @Override
+               public void permitReadsByUser(String consumerId, NsaApiKey asUser)
+                               throws ConfigDbException, AccessDeniedException {
+                       updateAcl(asUser, true, true, consumerId);
+               }
+
+               @Override
+               public void denyReadsByUser(String consumerId, NsaApiKey asUser)
+                               throws ConfigDbException, AccessDeniedException {
+                       updateAcl(asUser, true, false, consumerId);
+               }
+
+               private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key)
+                               throws ConfigDbException, AccessDeniedException{
+                       try
+                       {
+                               final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add );
+       
+                               // we have to assume we have current data, or load it again. for the expected use
+                               // case, assuming we can overwrite the data is fine.
+                               final JSONObject o = new JSONObject ();
+                               o.put ( "owner", fOwner );
+                               o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) );
+                               o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) );
+                               fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () );
+                               
+                               log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName );
+       
+                       }
+                       catch ( ConfigDbException | AccessDeniedException x )
+                       {
+                               throw x;
+                       }
+                       
+               }
+
+               private JSONObject safeSerialize(NsaAcl acl) {
+                       return acl == null ? null : acl.serialize();
+               }
+
+               private final String fName;
+               private final ConfigDb fConfigDb;
+               private final ConfigPath fBaseTopicData;
+               private final String fOwner;
+               private final String fDesc;
+               private final NsaAcl fReaders;
+               private final NsaAcl fWriters;
+               private boolean fTransactionEnabled;
+       
+               public boolean isTransactionEnabled() {
+                       return fTransactionEnabled;
+               }
+
+               @Override
+               public Set<String> getOwners() {
+                       final TreeSet<String> owners = new TreeSet<>();
+                       owners.add ( fOwner );
+                       return owners;
+               }
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java
new file mode 100644 (file)
index 0000000..184c414
--- /dev/null
@@ -0,0 +1,225 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.impl.*;
+import org.onap.dmaap.dmf.mr.CambriaApiVersionInfo;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.mr.apiServer.metrics.cambria.DMaaPMetricsSender;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/*@Component("dMaaPMetricsSet")*/
+
+/**
+ * Metrics related information
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
+
+       private final CdmStringConstant fVersion;
+       private final CdmConstant fStartTime;
+       private final CdmTimeSince fUpTime;
+
+       private final CdmCounter fRecvTotal;
+       private final CdmRateTicker fRecvEpsInstant;
+       private final CdmRateTicker fRecvEpsShort;
+       private final CdmRateTicker fRecvEpsLong;
+
+       private final CdmCounter fSendTotal;
+       private final CdmRateTicker fSendEpsInstant;
+       private final CdmRateTicker fSendEpsShort;
+       private final CdmRateTicker fSendEpsLong;
+
+       private final CdmCounter fKafkaConsumerCacheMiss;
+       private final CdmCounter fKafkaConsumerCacheHit;
+
+       private final CdmCounter fKafkaConsumerClaimed;
+       private final CdmCounter fKafkaConsumerTimeout;
+
+       private final CdmSimpleMetric fFanOutRatio;
+
+       private final HashMap<String, CdmRateTicker> fPathUseRates;
+       private final HashMap<String, CdmMovingAverage> fPathAvgs;
+
+       private rrNvReadable fSettings;
+
+       private final ScheduledExecutorService fScheduler;
+
+       /**
+        * Constructor initialization
+        * 
+        * @param cs
+        */
+       
+               public DMaaPMetricsSet(rrNvReadable cs) {
+               
+               fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
+               super.putItem("version", fVersion);
+
+               final long startTime = System.currentTimeMillis();
+               final Date d = new Date(startTime);
+               final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
+               fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text);
+               super.putItem("startTime", fStartTime);
+
+               fUpTime = new CdmTimeSince("seconds since start");
+               super.putItem("upTime", fUpTime);
+
+               fRecvTotal = new CdmCounter("Total events received since start");
+               super.putItem("recvTotalEvents", fRecvTotal);
+
+               fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
+               super.putItem("recvEpsInstant", fRecvEpsInstant);
+
+               fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
+               super.putItem("recvEpsShort", fRecvEpsShort);
+
+               fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
+               super.putItem("recvEpsLong", fRecvEpsLong);
+
+               fSendTotal = new CdmCounter("Total events sent since start");
+               super.putItem("sendTotalEvents", fSendTotal);
+
+               fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
+               super.putItem("sendEpsInstant", fSendEpsInstant);
+
+               fSendEpsShort = new CdmRateTicker("send eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
+               super.putItem("sendEpsShort", fSendEpsShort);
+
+               fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
+               super.putItem("sendEpsLong", fSendEpsLong);
+
+               fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses");
+               super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss);
+
+               fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits");
+               super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit);
+
+               fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed");
+               super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed);
+
+               fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout");
+               super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout);
+
+               // FIXME: CdmLevel is not exactly a great choice
+               fFanOutRatio = new CdmSimpleMetric() {
+                       @Override
+                       public String getRawValueString() {
+                               return getRawValue().toString();
+                       }
+
+                       @Override
+                       public Number getRawValue() {
+                               final double s = fSendTotal.getValue();
+                               final double r = fRecvTotal.getValue();
+                               return r == 0.0 ? 0.0 : s / r;
+                       }
+
+                       @Override
+                       public String summarize() {
+                               return getRawValueString() + " sends per recv";
+                       }
+
+               };
+               super.putItem("fanOut", fFanOutRatio);
+
+               // these are added to the metrics catalog as they're discovered
+               fPathUseRates = new HashMap<String, CdmRateTicker>();
+               fPathAvgs = new HashMap<String, CdmMovingAverage>();
+
+               fScheduler = Executors.newScheduledThreadPool(1);
+       }
+
+       @Override
+       public void setupCambriaSender() {
+               DMaaPMetricsSender.sendPeriodically(fScheduler, this,  "cambria.apinode.metrics.dmaap");
+       }
+
+       @Override
+       public void onRouteComplete(String name, long durationMs) {
+               CdmRateTicker ticker = fPathUseRates.get(name);
+               if (ticker == null) {
+                       ticker = new CdmRateTicker("calls/min on path " + name + "", 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS);
+                       fPathUseRates.put(name, ticker);
+                       super.putItem("pathUse_" + name, ticker);
+               }
+               ticker.tick();
+
+               CdmMovingAverage durs = fPathAvgs.get(name);
+               if (durs == null) {
+                       durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES);
+                       fPathAvgs.put(name, durs);
+                       super.putItem("pathDurationMs_" + name, durs);
+               }
+               durs.tick(durationMs);
+       }
+
+       @Override
+       public void publishTick(int amount) {
+               if (amount > 0) {
+                       fRecvTotal.bumpBy(amount);
+                       fRecvEpsInstant.tick(amount);
+                       fRecvEpsShort.tick(amount);
+                       fRecvEpsLong.tick(amount);
+               }
+       }
+
+       @Override
+       public void consumeTick(int amount) {
+               if (amount > 0) {
+                       fSendTotal.bumpBy(amount);
+                       fSendEpsInstant.tick(amount);
+                       fSendEpsShort.tick(amount);
+                       fSendEpsLong.tick(amount);
+               }
+       }
+
+       @Override
+       public void onKafkaConsumerCacheMiss() {
+               fKafkaConsumerCacheMiss.bump();
+       }
+
+       @Override
+       public void onKafkaConsumerCacheHit() {
+               fKafkaConsumerCacheHit.bump();
+       }
+
+       @Override
+       public void onKafkaConsumerClaimed() {
+               fKafkaConsumerClaimed.bump();
+       }
+
+       @Override
+       public void onKafkaConsumerTimeout() {
+               fKafkaConsumerTimeout.bump();
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java
new file mode 100644 (file)
index 0000000..88ae8ae
--- /dev/null
@@ -0,0 +1,138 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.confimpl.EncryptingLayer;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.EncryptingApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+import com.att.nsa.util.rrConvertor;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.security.Key;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class DMaaPNsaApiDb {
+       
+       
+       private DMaaPZkConfigDb cdb;
+       
+       //private static final Logger log = Logger
+               
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class);
+       
+/**
+ * 
+ * Constructor initialized
+ * @param settings
+ * @param cdb
+ */
+       @Autowired
+       public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) {
+               
+               this.setCdb(cdb);
+       }
+       /**
+        * 
+        * @param settings
+        * @param cdb
+        * @return
+        * @throws ConfigDbException
+        * @throws missingReqdSetting
+        */
+       public static NsaApiDb<NsaSimpleApiKey> buildApiKeyDb(
+                       rrNvReadable settings, ConfigDb cdb) throws ConfigDbException,
+                       missingReqdSetting {
+               // Cambria uses an encrypted api key db
+
+               
+               final String keyBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.key");
+               
+               
+       
+       final String initVectorBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.iv");
+               // if neither value was provided, don't encrypt api key db
+               if (keyBase64 == null && initVectorBase64 == null) {
+                       log.info("This server is configured to use an unencrypted API key database. See the settings documentation.");
+                       return new BaseNsaApiDbImpl<>(cdb,
+                                       new NsaSimpleApiKeyFactory());
+               } else if (keyBase64 == null) {
+                       // neither or both, otherwise something's goofed
+                       throw new missingReqdSetting("cambria.secureConfig.key");
+               } else if (initVectorBase64 == null) {
+                       // neither or both, otherwise something's goofed
+                       throw new missingReqdSetting("cambria.secureConfig.iv");
+               } else {
+                       log.info("This server is configured to use an encrypted API key database.");
+                       final Key key = EncryptingLayer.readSecretKey(keyBase64);
+                       final byte[] iv = rrConvertor.base64Decode(initVectorBase64);
+                       return new EncryptingApiDbImpl<>(cdb,
+                                       new NsaSimpleApiKeyFactory(), key, iv);
+               }
+       }
+
+       /**
+        * @return
+        * returns settings
+        */
+
+               
+       
+
+       /**
+        * @param settings
+        * set settings
+        */
+       
+               
+       
+
+        /**
+        * @return
+        * returns cbd
+        */
+       public DMaaPZkConfigDb getCdb() {
+               return cdb;
+       }
+       /**
+        * @param cdb
+        * set cdb
+        */
+       public void setCdb(DMaaPZkConfigDb cdb) {
+               this.cdb = cdb;
+       }
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java
new file mode 100644 (file)
index 0000000..d3fdcaa
--- /dev/null
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import org.I0Itec.zkclient.ZkClient;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+/**
+ * Created for Zookeeper client which will read configuration and settings parameter
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPZkClient extends ZkClient {
+
+       /**
+        * This constructor will get the settings value from rrNvReadable
+     * and ConfigurationReader's zookeeper connection
+        * @param settings
+        */
+       public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) {
+               super(ConfigurationReader.getMainZookeeperConnectionString());
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java
new file mode 100644 (file)
index 0000000..d964efa
--- /dev/null
@@ -0,0 +1,50 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import com.att.nsa.configs.confimpl.ZkConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+/**
+ * Provide the zookeeper config db connection 
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPZkConfigDb extends ZkConfigDb {
+       /**
+        * This Constructor will provide the configuration details from the property reader
+     * and DMaaPZkClient
+        * @param zk
+        * @param settings
+        */
+       public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk,
+                       @Qualifier("propertyReader") rrNvReadable settings) {
+               
+               
+               super(ConfigurationReader.getMainZookeeperConnectionString(),ConfigurationReader.getMainZookeeperConnectionSRoot());
+               
+       }
+       
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java
new file mode 100644 (file)
index 0000000..a46cc3c
--- /dev/null
@@ -0,0 +1,214 @@
+/**
+ * 
+ */
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+
+import java.util.Date;
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+
+public class LogDetails {
+       
+       private String publisherId;
+       private String topicId;
+       private String subscriberGroupId;
+       private String subscriberId;
+       private String publisherIp;
+       private String messageBatchId;
+       private String messageSequence;
+       private String messageTimestamp;
+       private String consumeTimestamp;
+       private String transactionIdTs; 
+       private String serverIp;
+       
+       private long messageLengthInBytes; 
+       private long totalMessageCount;
+       
+       private boolean transactionEnabled;
+       /**
+        * This is for transaction enabled logging details
+        *
+        */
+       public LogDetails() {
+               super();
+       }
+
+       public String getTransactionId() {
+               StringBuilder transactionId = new StringBuilder();
+               transactionId.append(transactionIdTs);
+               transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
+               transactionId.append(publisherIp);
+               transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
+               transactionId.append(messageBatchId);
+               transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
+               transactionId.append(messageSequence);
+
+               return transactionId.toString();
+       }
+
+       public String getPublisherId() {
+               return publisherId;
+       }
+
+       public void setPublisherId(String publisherId) {
+               this.publisherId = publisherId;
+       }
+
+       public String getTopicId() {
+               return topicId;
+       }
+
+       public void setTopicId(String topicId) {
+               this.topicId = topicId;
+       }
+
+       public String getSubscriberGroupId() {
+               return subscriberGroupId;
+       }
+
+       public void setSubscriberGroupId(String subscriberGroupId) {
+               this.subscriberGroupId = subscriberGroupId;
+       }
+
+       public String getSubscriberId() {
+               return subscriberId;
+       }
+
+       public void setSubscriberId(String subscriberId) {
+               this.subscriberId = subscriberId;
+       }
+
+       public String getPublisherIp() {
+               return publisherIp;
+       }
+
+       public void setPublisherIp(String publisherIp) {
+               this.publisherIp = publisherIp;
+       }
+
+       public String getMessageBatchId() {
+               return messageBatchId;
+       }
+
+       public void setMessageBatchId(Long messageBatchId) {
+               this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId);
+       }
+
+       public String getMessageSequence() {
+               return messageSequence;
+       }
+
+       public void setMessageSequence(String messageSequence) {
+               this.messageSequence = messageSequence;
+       }
+
+       public String getMessageTimestamp() {
+               return messageTimestamp;
+       }
+
+       public void setMessageTimestamp(String messageTimestamp) {
+               this.messageTimestamp = messageTimestamp;
+       }
+
+       public String getPublishTimestamp() {
+               return Utils.getFormattedDate(new Date());
+       }
+
+       public String getConsumeTimestamp() {
+               return consumeTimestamp;
+       }
+
+       public void setConsumeTimestamp(String consumeTimestamp) {
+               this.consumeTimestamp = consumeTimestamp;
+       }
+
+       public long getMessageLengthInBytes() {
+               return messageLengthInBytes;
+       }
+
+       public void setMessageLengthInBytes(long messageLengthInBytes) {
+               this.messageLengthInBytes = messageLengthInBytes;
+       }
+
+       public long getTotalMessageCount() {
+               return totalMessageCount;
+       }
+
+       public void setTotalMessageCount(long totalMessageCount) {
+               this.totalMessageCount = totalMessageCount;
+       }
+
+       public boolean isTransactionEnabled() {
+               return transactionEnabled;
+       }
+
+       public void setTransactionEnabled(boolean transactionEnabled) {
+               this.transactionEnabled = transactionEnabled;
+       }
+
+       public String getTransactionIdTs() {
+               return transactionIdTs;
+       }
+
+       public void setTransactionIdTs(String transactionIdTs) {
+               this.transactionIdTs = transactionIdTs;
+       }
+
+       public String getPublisherLogDetails() {
+               
+                       StringBuilder buffer = new StringBuilder();
+                       buffer.append("[publisherId=" + publisherId);
+                       buffer.append(", topicId=" + topicId);
+                       buffer.append(", messageTimestamp=" + messageTimestamp);
+                       buffer.append(", publisherIp=" + publisherIp);
+                       buffer.append(", messageBatchId=" + messageBatchId);
+                       buffer.append(", messageSequence=" + messageSequence );
+                       buffer.append(", messageLengthInBytes=" + messageLengthInBytes);
+                       buffer.append(", transactionEnabled=" + transactionEnabled);
+                       buffer.append(", transactionId=" + getTransactionId());
+                       buffer.append(", publishTimestamp=" + getPublishTimestamp());           
+                       buffer.append(", serverIp=" + getServerIp()+"]");
+               return buffer.toString();
+               
+       }
+
+       public String getServerIp() {
+               return serverIp;
+       }
+
+       public void setServerIp(String serverIp) {
+               this.serverIp = serverIp;
+       }
+
+       public void setMessageBatchId(String messageBatchId) {
+               this.messageBatchId = messageBatchId;
+       }
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java
new file mode 100644 (file)
index 0000000..c647959
--- /dev/null
@@ -0,0 +1,154 @@
+/**
+ * 
+ */
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.beans;
+
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+@XmlRootElement
+public class TopicBean implements Serializable {
+
+       private static final long serialVersionUID = -8620390377775457949L;
+       private String topicName;
+       private String topicDescription;
+
+       private int partitionCount;
+       private int replicationCount;
+
+       private boolean transactionEnabled;
+
+       /**
+        * constructor
+        */
+       public TopicBean() {
+               super();
+       }
+
+       /**
+        * constructor initialization with topic details name, description,
+        * partition, replication, transaction
+        * 
+        * @param topicName
+        * @param description
+        * @param partitionCount
+        * @param replicationCount
+        * @param transactionEnabled
+        */
+       public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount,
+                       boolean transactionEnabled) {
+               super();
+               this.topicName = topicName;
+               this.topicDescription = topicDescription;
+               this.partitionCount = partitionCount;
+               this.replicationCount = replicationCount;
+               this.transactionEnabled = transactionEnabled;
+       }
+
+       /**
+        * @return
+        * returns topic name which is of String type
+        */
+       public String getTopicName() {
+               return topicName;
+       }
+
+       /**
+        * @param topicName
+        * set topic name  
+        */
+       public void setTopicName(String topicName) {
+               this.topicName = topicName;
+       }
+
+
+       /**
+        * @return
+        * returns partition count which is of int type
+        */
+       public int getPartitionCount() {
+               return partitionCount;
+       }
+
+       /**
+        * @param partitionCount
+        * set partition Count 
+        */
+       public void setPartitionCount(int partitionCount) {
+               this.partitionCount = partitionCount;
+       }
+       
+       /**
+        * @return
+        * returns replication count which is of int type
+        */
+       public int getReplicationCount() {
+               return replicationCount;
+       }
+       
+       /**
+        * @param
+        * set replication count which is of int type
+        */
+       public void setReplicationCount(int replicationCount) {
+               this.replicationCount = replicationCount;
+       }
+       
+       /**
+        * @return
+        * returns boolean value which indicates whether transaction is Enabled 
+        */
+       public boolean isTransactionEnabled() {
+               return transactionEnabled;
+       }
+       
+       /**
+        * @param
+        * sets boolean value which indicates whether transaction is Enabled 
+        */
+       public void setTransactionEnabled(boolean transactionEnabled) {
+               this.transactionEnabled = transactionEnabled;
+       }
+
+       /**
+        * 
+        * @return returns description which is of String type
+        */
+       public String getTopicDescription() {
+               return topicDescription;
+       }
+       /**
+        * 
+        * @param topicDescription
+        * set description which is of String type
+        */
+       public void setTopicDescription(String topicDescription) {
+               this.topicDescription = topicDescription;
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java b/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java
new file mode 100644 (file)
index 0000000..fd342b7
--- /dev/null
@@ -0,0 +1,126 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.constants;
+
+import org.onap.dmaap.dmf.mr.utils.Utils;
+
+/**
+ * This is the constant files for all the property or parameters.
+ * @author nilanjana.maity
+ *
+ */
+public interface CambriaConstants {
+
+       String CAMBRIA = "Cambria";
+       String DMAAP = "DMaaP";
+
+       String kDefault_ZkRoot = "/fe3c/cambria";
+
+       String kSetting_ZkConfigDbRoot = "config.zk.root";
+       String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config";
+String msgRtr_prop="MsgRtrApi.properties";
+       String kBrokerType = "broker.type";
+       
+       /**
+        * value to use to signal kafka broker type.
+        */
+       String kBrokerType_Kafka = "kafka";
+       String kBrokerType_Memory = "memory";
+       String kSetting_AdminSecret = "authentication.adminSecret";
+
+       String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier";
+
+       /**
+        * value to use to signal max empty poll per minute
+        */
+       String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
+       String kSetting_MaxPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
+       double kDefault_MaxEmptyPollsPerMinute = 10.0;
+
+       String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
+       String kSetting_SleepMsRealOnRateLimit = "cambria.rateLimitActual.delay.ms";
+       long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
+
+       String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
+       int kDefault_RateLimitWindowLength = 5;
+
+       String kConfig = "c";
+
+       String kSetting_Port = "cambria.service.port";
+       /**
+        * value to use to signal default port
+        */
+       int kDefault_Port = 3904;
+
+       String kSetting_MaxThreads = "tomcat.maxthreads";
+       int kDefault_MaxThreads = -1;
+       
+       
+       
+       //String kDefault_TomcatProtocolClass = Http11NioProtocol.class.getName ();
+
+       String kSetting_ZkConfigDbServers = "config.zk.servers";
+       
+       /**
+        * value to indicate localhost port number
+        */
+       String kDefault_ZkConfigDbServers = "localhost:2181";
+
+       /**
+        * value to use to signal Session time out
+        */
+       String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout";
+       int kDefault_ZkSessionTimeoutMs = 20 * 1000;
+
+       /**
+        * value to use to signal connection time out 
+        */
+       String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout";
+       int kDefault_ZkConnectionTimeoutMs = 5 * 1000;
+
+       String TRANSACTION_ID_SEPARATOR = "::";
+
+       /**
+        * value to use to signal there's no timeout on the consumer request.
+        */
+       public static final int kNoTimeout = 10000;
+
+       /**
+        * value to use to signal no limit in the number of messages returned.
+        */
+       public static final int kNoLimit = 0;
+
+       /**
+        * value to use to signal that the caller wants the next set of events
+        */
+       public static final int kNextOffset = -1;
+
+       /**
+        * value to use to signal there's no filter on the response stream.
+        */
+       public static final String kNoFilter = "";
+
+       //Added for Metric publish
+       public static final int kStdCambriaServicePort = 3904;
+       public static final String kBasePath = "/events/";
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java
new file mode 100644 (file)
index 0000000..c70190f
--- /dev/null
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+
+public class DMaaPAccessDeniedException extends CambriaApiException{
+       
+       
+       
+       public DMaaPAccessDeniedException(ErrorResponse errRes) {
+               super(errRes);
+               
+       }
+
+       /**
+        * 
+        */
+       private static final long serialVersionUID = 1L;
+
+       
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java
new file mode 100644 (file)
index 0000000..ce00cc6
--- /dev/null
@@ -0,0 +1,93 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.inject.Singleton;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+/**
+ * Exception Mapper class to handle
+ * CambriaApiException 
+ * @author rajashree.khare
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException>{
+
+private ErrorResponse errRes;
+
+
+private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
+       
+       @Autowired
+       private DMaaPErrorMessages msgs;
+       
+       public DMaaPCambriaExceptionMapper() {
+               super();
+               LOGGER.info("Cambria Exception Mapper Created..");
+       }
+       
+       @Override
+       public Response toResponse(CambriaApiException ex) {
+
+               LOGGER.info("Reached Cambria Exception Mapper..");
+               
+               /**
+                * Cambria Generic Exception
+                */
+               if(ex instanceof CambriaApiException)
+               {
+                       
+                       errRes = ex.getErrRes();
+                       if(errRes!=null) {
+                               
+                               return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+                       }
+                       else
+                       {
+                               return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON)
+                                   .build();
+                       }
+                       
+                       
+               }
+               else
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
+                       return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build();
+               }
+               
+       }
+
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java
new file mode 100644 (file)
index 0000000..96192ec
--- /dev/null
@@ -0,0 +1,247 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import org.springframework.stereotype.Component;
+import org.springframework.web.context.support.SpringBeanAutowiringSupport;
+
+import javax.annotation.PostConstruct;
+
+/**
+ * This Class reads the error message properties
+ * from the properties file
+ * @author rajashree.khare
+ *
+ */
+@Component
+public class DMaaPErrorMessages {
+
+       
+                       
+
+       //@Value("${resource.not.found}")
+       private String notFound="The requested resource was not found.Please verify the URL and try again";
+       
+//     @Value("${server.unavailable}")
+       private String serverUnav="Server is temporarily unavailable or busy.Try again later, or try another server in the cluster.";
+       
+//     @Value("${http.method.not.allowed}")
+       private String methodNotAllowed="The specified HTTP method is not allowed for the requested resource.Enter a valid HTTP method and try again.";
+       
+       //@Value("${incorrect.request.json}")
+       private String badRequest="Incorrect JSON object. Please correct the JSON format and try again.";
+       
+//     @Value("${network.time.out}")
+       private String nwTimeout="Connection to the DMaaP MR was timed out.Please try again.";
+       
+       //@Value("${get.topic.failure}")
+       private String topicsfailure="Failed to retrieve list of all topics.";
+       
+       //@Value("${not.permitted.access.1}")
+       private String notPermitted1="Access Denied.User does not have permission to perform ";
+       
+       //@Value("${not.permitted.access.2}")
+       private String notPermitted2=" operation on Topic:";
+       
+       //@Value("${get.topic.details.failure}")
+       private String topicDetailsFail="Failed to retrieve details of topic:";
+       
+       //@Value("${create.topic.failure}")
+       private String createTopicFail="Failed to create topic:";
+       
+       //@Value("${delete.topic.failure}")
+       private String deleteTopicFail="Failed to delete topic:";
+       
+       //@Value("${incorrect.json}")
+       private String incorrectJson="Incorrect JSON object.Could not parse JSON. Please correct the JSON format and try again.";
+       
+       //@Value("${consume.msg.error}")
+       private String consumeMsgError="Error while reading data from topic.";
+       
+       //@Value("${publish.msg.error}")
+       private String publishMsgError="Error while publishing data to topic.";
+       
+       
+       //@Value("${publish.msg.count}")
+       private String publishMsgCount="Successfully published number of messages :";
+       
+       
+       //@Value("${authentication.failure}")
+       private String authFailure="Access Denied: Invalid Credentials. Enter a valid MechId and Password and try again.";
+       //@Value("${msg_size_exceeds}")
+       private String msgSizeExceeds="Message size exceeds the default size.";
+       
+       
+       //@Value("${topic.not.exist}")
+       private String topicNotExist="No such topic exists.";
+       
+       public String getMsgSizeExceeds() {
+               return msgSizeExceeds;
+       }
+
+       public void setMsgSizeExceeds(String msgSizeExceeds) {
+               this.msgSizeExceeds = msgSizeExceeds;
+       }
+
+       public String getNotFound() {
+               return notFound;
+       }
+
+       public void setNotFound(String notFound) {
+               this.notFound = notFound;
+       }
+
+       public String getServerUnav() {
+               return serverUnav;
+       }
+
+       public void setServerUnav(String serverUnav) {
+               this.serverUnav = serverUnav;
+       }
+
+       public String getMethodNotAllowed() {
+               return methodNotAllowed;
+       }
+
+       public void setMethodNotAllowed(String methodNotAllowed) {
+               this.methodNotAllowed = methodNotAllowed;
+       }
+
+       public String getBadRequest() {
+               return badRequest;
+       }
+
+       public void setBadRequest(String badRequest) {
+               this.badRequest = badRequest;
+       }
+
+       public String getNwTimeout() {
+               return nwTimeout;
+       }
+
+       public void setNwTimeout(String nwTimeout) {
+               this.nwTimeout = nwTimeout;
+       }
+
+       public String getNotPermitted1() {
+               return notPermitted1;
+       }
+
+       public void setNotPermitted1(String notPermitted1) {
+               this.notPermitted1 = notPermitted1;
+       }
+
+       public String getNotPermitted2() {
+               return notPermitted2;
+       }
+
+       public void setNotPermitted2(String notPermitted2) {
+               this.notPermitted2 = notPermitted2;
+       }
+
+       public String getTopicsfailure() {
+               return topicsfailure;
+       }
+
+       public void setTopicsfailure(String topicsfailure) {
+               this.topicsfailure = topicsfailure;
+       }
+
+       public String getTopicDetailsFail() {
+               return topicDetailsFail;
+       }
+
+       public void setTopicDetailsFail(String topicDetailsFail) {
+               this.topicDetailsFail = topicDetailsFail;
+       }
+
+       public String getCreateTopicFail() {
+               return createTopicFail;
+       }
+
+       public void setCreateTopicFail(String createTopicFail) {
+               this.createTopicFail = createTopicFail;
+       }
+
+       public String getIncorrectJson() {
+               return incorrectJson;
+       }
+
+       public void setIncorrectJson(String incorrectJson) {
+               this.incorrectJson = incorrectJson;
+       }
+
+       public String getDeleteTopicFail() {
+               return deleteTopicFail;
+       }
+
+       public void setDeleteTopicFail(String deleteTopicFail) {
+               this.deleteTopicFail = deleteTopicFail;
+       }
+
+       public String getConsumeMsgError() {
+               return consumeMsgError;
+       }
+
+       public void setConsumeMsgError(String consumeMsgError) {
+               this.consumeMsgError = consumeMsgError;
+       }
+
+       public String getPublishMsgError() {
+               return publishMsgError;
+       }
+
+       public void setPublishMsgError(String publishMsgError) {
+               this.publishMsgError = publishMsgError;
+       }
+
+       public String getPublishMsgCount() {
+               return publishMsgCount;
+       }
+
+       public String getAuthFailure() {
+               return authFailure;
+       }
+
+       public void setAuthFailure(String authFailure) {
+               this.authFailure = authFailure;
+       }
+
+       public void setPublishMsgCount(String publishMsgCount) {
+               this.publishMsgCount = publishMsgCount;
+       }
+
+       public String getTopicNotExist() {
+               return topicNotExist;
+       }
+
+       public void setTopicNotExist(String topicNotExist) {
+               this.topicNotExist = topicNotExist;
+       }
+       
+       
+       @PostConstruct
+       public void init() {
+           SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this);
+       }
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java
new file mode 100644 (file)
index 0000000..a7b073a
--- /dev/null
@@ -0,0 +1,93 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+/**
+ * Define the Error Response Codes for MR
+ * using this enumeration
+ * @author rajashree.khare
+ *
+ */
+public enum DMaaPResponseCode {
+       
+         
+         /**
+          * GENERIC
+          */
+         RESOURCE_NOT_FOUND(3001),
+         SERVER_UNAVAILABLE(3002),
+         METHOD_NOT_ALLOWED(3003),
+         GENERIC_INTERNAL_ERROR(1004),
+         /**
+          * AAF
+          */
+         INVALID_CREDENTIALS(4001),
+         ACCESS_NOT_PERMITTED(4002),
+         UNABLE_TO_AUTHORIZE(4003),
+         /**
+          * PUBLISH AND SUBSCRIBE
+          */
+         MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001),
+         UNABLE_TO_PUBLISH(5002),
+         INCORRECT_BATCHING_FORMAT(5003),
+         MSG_SIZE_EXCEEDS_MSG_LIMIT(5004),
+         INCORRECT_JSON(5005),
+         CONN_TIMEOUT(5006),
+         PARTIAL_PUBLISH_MSGS(5007),
+         CONSUME_MSG_ERROR(5008),
+         PUBLISH_MSG_ERROR(5009), 
+         RETRIEVE_TRANSACTIONS(5010),
+         RETRIEVE_TRANSACTIONS_DETAILS(5011),
+         TOO_MANY_REQUESTS(5012),
+         
+         RATE_LIMIT_EXCEED(301),
+        
+         /**
+          * TOPICS
+          */
+       GET_TOPICS_FAIL(6001),
+       GET_TOPICS_DETAILS_FAIL(6002),
+       CREATE_TOPIC_FAIL(6003),
+       DELETE_TOPIC_FAIL(6004),
+       GET_PUBLISHERS_BY_TOPIC(6005),
+       GET_CONSUMERS_BY_TOPIC(6006),
+       PERMIT_PUBLISHER_FOR_TOPIC(6007),
+       REVOKE_PUBLISHER_FOR_TOPIC(6008),
+       PERMIT_CONSUMER_FOR_TOPIC(6009),
+       REVOKE_CONSUMER_FOR_TOPIC(6010),
+       GET_CONSUMER_CACHE(6011),
+       DROP_CONSUMER_CACHE(6012),
+       GET_METRICS_ERROR(6013),
+       GET_BLACKLIST(6014),
+       ADD_BLACKLIST(6015),
+       REMOVE_BLACKLIST(6016),
+       TOPIC_NOT_IN_AAF(6017);
+       private int responseCode;
+       
+       public int getResponseCode() {
+               return responseCode;
+       }
+       private DMaaPResponseCode (final int code) {
+               responseCode = code;
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java
new file mode 100644 (file)
index 0000000..0ee5498
--- /dev/null
@@ -0,0 +1,129 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.inject.Singleton;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+/**
+ * Exception Mapper class to handle
+ * Jersey Exceptions
+ * @author rajashree.khare
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException>{
+       
+       
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
+       private ErrorResponse errRes;
+       
+       @Autowired
+       private DMaaPErrorMessages msgs;
+       
+       public DMaaPWebExceptionMapper() {
+               super();
+               LOGGER.info("WebException Mapper Created..");
+       }
+
+       @Override
+       public Response toResponse(WebApplicationException ex) {
+               
+               LOGGER.info("Reached WebException Mapper");
+               
+               /**
+                * Resource Not Found
+                */
+               if(ex instanceof NotFoundException)
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound());
+                       
+                       LOGGER.info(errRes.toString());
+                       
+                       return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+                       
+               }
+               
+               if(ex instanceof InternalServerErrorException)
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+                       
+                       LOGGER.info(errRes.toString());
+                       return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+                       
+               }
+               
+               if(ex instanceof NotAuthorizedException)
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure());
+                       
+                       LOGGER.info(errRes.toString());
+                       return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+               }
+               
+               if(ex instanceof BadRequestException)
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest());
+                       
+                       LOGGER.info(errRes.toString());
+                       return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+               }
+               if(ex instanceof NotAllowedException)
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed());
+                       
+                       LOGGER.info(errRes.toString());
+                       return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+               }
+               
+               if(ex instanceof ServiceUnavailableException)
+               {
+                       errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+                       
+                       LOGGER.info(errRes.toString());
+                       return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+                           .build();
+               }
+               
+               
+               return Response.serverError().build();
+       }
+
+       
+
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java
new file mode 100644 (file)
index 0000000..e7c138e
--- /dev/null
@@ -0,0 +1,136 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+import org.json.JSONObject;
+
+/**
+ * Represents the Error Response Object 
+ * that is rendered as a JSON object when
+ * an exception or error occurs on MR Rest Service.
+ * @author rajashree.khare
+ *
+ */
+//@XmlRootElement
+public class ErrorResponse {
+       
+       private int httpStatusCode;
+       private int mrErrorCode;
+    private String errorMessage;
+    private String helpURL;
+    private String statusTs;
+    private String topic;
+    private String publisherId;
+    private String publisherIp;
+    private String subscriberId;
+    private String subscriberIp;
+       
+
+       public ErrorResponse(int httpStatusCode, int mrErrorCode,
+                       String errorMessage, String helpURL, String statusTs, String topic,
+                       String publisherId, String publisherIp, String subscriberId,
+                       String subscriberIp) {
+               super();
+               this.httpStatusCode = httpStatusCode;
+               this.mrErrorCode = mrErrorCode;
+               this.errorMessage = errorMessage;
+               this.helpURL = "http://onap.readthedocs.io";
+               this.statusTs = statusTs;
+               this.topic = topic;
+               this.publisherId = publisherId;
+               this.publisherIp = publisherIp;
+               this.subscriberId = subscriberId;
+               this.subscriberIp = subscriberIp;
+       }
+
+       public ErrorResponse(int httpStatusCode, int mrErrorCode,
+                       String errorMessage) {
+               super();
+               this.httpStatusCode = httpStatusCode;
+               this.mrErrorCode = mrErrorCode;
+               this.errorMessage = errorMessage;
+               this.helpURL = "http://onap.readthedocs.io";
+               
+       }
+       
+       public int getHttpStatusCode() {
+               return httpStatusCode;
+       }
+
+       public void setHttpStatusCode(int httpStatusCode) {
+               this.httpStatusCode = httpStatusCode;
+       }
+       
+       public int getMrErrorCode() {
+               return mrErrorCode;
+       }
+
+
+       public void setMrErrorCode(int mrErrorCode) {
+               this.mrErrorCode = mrErrorCode;
+       }
+
+       
+       public String getErrorMessage() {
+               return errorMessage;
+       }
+
+       public void setErrorMessage(String errorMessage) {
+               this.errorMessage = errorMessage;
+       }
+
+       public String getHelpURL() {
+               return helpURL;
+       }
+
+       public void setHelpURL(String helpURL) {
+               this.helpURL = helpURL;
+       }
+
+       @Override
+       public String toString() {
+               return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode
+                               + "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\""
+                               + errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\""
+                               + ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\""
+                               + ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\""
+                               + ", \"subscriberIp\":\""+subscriberIp+"\"}";
+       }
+       
+       public String getErrMapperStr1() {
+               return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage="
+                               + errorMessage + ", helpURL=" + helpURL + "]";
+       }
+
+       
+       
+       public JSONObject getErrMapperStr() {
+               JSONObject o = new JSONObject();
+               o.put("status", getHttpStatusCode());
+               o.put("mrstatus", getMrErrorCode());
+               o.put("message", getErrorMessage());
+               o.put("helpURL", getHelpURL());
+               return o;
+       }
+       
+    
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java b/src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java
new file mode 100644 (file)
index 0000000..0c27d0f
--- /dev/null
@@ -0,0 +1,64 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.listener;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+/**
+ * This is the Cambria Servlet Context Listner which helpes while loading the app which provide the endpoints 
+ * @author nilanjana.maity
+ *
+ */
+public class CambriaServletContextListener implements ServletContextListener {
+       
+       DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class);
+       
+
+       @Override
+       
+       /**
+        * contextDestroyed() loads unpublished end points
+        * @param arg0
+        */
+       public void contextDestroyed(ServletContextEvent arg0) {
+               log.info("CambriaServletContextListener contextDestroyed");
+               
+               loader.unPublishEndPoints();
+       }
+
+       @Override
+       /**
+        * contextInitialized() loads published end points
+        * @param arg0
+        */
+       public void contextInitialized(ServletContextEvent arg0) {
+               log.info("CambriaServletContextListener contextInitialized");
+               loader.publishEndPoints();
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java b/src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java
new file mode 100644 (file)
index 0000000..eb5bf37
--- /dev/null
@@ -0,0 +1,123 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.listener;
+
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.api.DME2Manager;
+import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.dmaap.dmf.mr.service.impl.EventsServiceImpl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class DME2EndPointLoader {
+
+       private String latitude;
+       private String longitude;
+       private String version;
+       private String serviceName;
+       private String env;
+       private String routeOffer;
+       private String hostName;
+       private String port;
+       private String contextPath;
+       private String protocol;
+       private String serviceURL;
+       private static DME2EndPointLoader loader = new DME2EndPointLoader();
+
+       private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+       private DME2EndPointLoader() {
+       }
+
+       public static DME2EndPointLoader getInstance() {
+               return loader;
+       }
+
+       /**
+        * publishing endpoints
+        */
+       public void publishEndPoints() {
+
+               try {
+                       InputStream input = this.getClass().getResourceAsStream("/endpoint.properties");
+                       Properties props = new Properties();
+                       props.load(input);
+
+                       latitude = props.getProperty("Latitude");
+                       longitude = props.getProperty("Longitude");
+                       version = props.getProperty("Version");
+                       serviceName = props.getProperty("ServiceName");
+                       env = props.getProperty("Environment");
+                       routeOffer = props.getProperty("RouteOffer");
+                       hostName = props.getProperty("HostName");
+                       port = props.getProperty("Port");
+                       contextPath = props.getProperty("ContextPath");
+                       protocol = props.getProperty("Protocol");
+
+                       System.setProperty("AFT_LATITUDE", latitude);
+                       System.setProperty("AFT_LONGITUDE", longitude);
+                       System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+
+                       serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/"
+                                       + "routeOffer=" + routeOffer;
+
+                       DME2Manager manager = new DME2Manager("testEndpointPublish", props);
+                       manager.setClientCredentials("sh301n", "");
+                       DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
+                       // Publish API takes service name, context path, hostname, port and
+                       // protocol as args
+                       svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol);
+
+               } catch (IOException | DME2Exception e) {
+                       LOG.error("Failed due to :" + e);
+               }
+
+       }
+/**
+ * unpublishing endpoints
+ */
+       public void unPublishEndPoints() {
+
+               DME2Manager manager;
+               try {
+                       System.setProperty("AFT_LATITUDE", latitude);
+                       System.setProperty("AFT_LONGITUDE", longitude);
+                       System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+
+                       manager = DME2Manager.getDefaultInstance();
+                       DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
+                       svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port));
+               } catch (DME2Exception e) {
+                       LOG.error("Failed due to DME2Exception" + e);
+               }
+
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java
new file mode 100644 (file)
index 0000000..8acb67d
--- /dev/null
@@ -0,0 +1,92 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+
+import java.util.List;
+
+/**
+ * A broker interface to manage metadata around topics, etc.
+ * 
+ * @author peter
+ *
+ */
+public interface Broker {
+       /**
+        * 
+        * @author anowarul.islam
+        *
+        */
+       public class TopicExistsException extends Exception {
+               /**
+                * 
+                * @param topicName
+                */
+               public TopicExistsException(String topicName) {
+                       super("Topic " + topicName + " exists.");
+               }
+
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * Get all topics in the underlying broker.
+        * 
+        * @return
+        * @throws ConfigDbException
+        */
+       List<Topic> getAllTopics() throws ConfigDbException;
+
+       /**
+        * Get a specific topic from the underlying broker.
+        * 
+        * @param topic
+        * @return a topic, or null
+        */
+       Topic getTopic(String topic) throws ConfigDbException;
+
+       /**
+        * create a  topic
+        * 
+        * @param topic
+        * @param description
+        * @param ownerApiKey
+        * @param partitions
+        * @param replicas
+        * @param transactionEnabled
+        * @return
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        */
+       Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
+
+       /**
+        * Delete a topic by name
+        * 
+        * @param topic
+        */
+       void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java
new file mode 100644 (file)
index 0000000..89a7251
--- /dev/null
@@ -0,0 +1,95 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+
+import java.util.List;
+
+/**
+ * A broker interface to manage metadata around topics, etc.
+ * alternate for Broker1 to avoid this error  in spring boot
+ *org.springframework.beans.factory.NoUniqueBeanDefinitionException:
+ * No qualifying bean of type [com.att.dmf.mr.metabroker.Broker] is defined: 
+ * expected single matching bean but found 2: mmb,dMaaPKafkaMetaBroker
+
+ *
+ */
+public interface Broker1 {
+       /**
+        * 
+        * @author Ramkumar
+        *
+        */
+       public class TopicExistsException extends Exception {
+               /**
+                * 
+                * @param topicName
+                */
+               public TopicExistsException(String topicName) {
+                       super("Topic " + topicName + " exists.");
+               }
+
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * Get all topics in the underlying broker.
+        * 
+        * @return
+        * @throws ConfigDbException
+        */
+       List<Topic> getAllTopics() throws ConfigDbException;
+
+       /**
+        * Get a specific topic from the underlying broker.
+        * 
+        * @param topic
+        * @return a topic, or null
+        */
+       Topic getTopic(String topic) throws ConfigDbException;
+
+       /**
+        * create a  topic
+        * 
+        * @param topic
+        * @param description
+        * @param ownerApiKey
+        * @param partitions
+        * @param replicas
+        * @param transactionEnabled
+        * @return
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        */
+       Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
+
+       /**
+        * Delete a topic by name
+        * 
+        * @param topic
+        */
+       void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java
new file mode 100644 (file)
index 0000000..04df4b4
--- /dev/null
@@ -0,0 +1,134 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource;
+
+/**
+ * This is the interface for topic and all the topic related operations
+ * get topic name, owner, description, transactionEnabled etc.
+ * @author nilanjana.maity
+ *
+ */
+public interface Topic extends ReadWriteSecuredResource
+{      
+       /**
+        * User defined exception for access denied while access the topic for Publisher and consumer
+        * @author nilanjana.maity
+        *
+        *//*
+       public class AccessDeniedException extends Exception
+       
+               *//**
+                * AccessDenied Description
+                *//*
+               
+               *//**
+                * AccessDenied Exception for the user while authenticating the user request
+                * @param user
+                *//*
+               
+               private static final long serialVersionUID = 1L;
+       }*/
+
+       /**
+        * Get this topic's name
+        * @return
+        */
+       String getName ();
+
+       /**
+        * Get the API key of the owner of this topic.
+        * @return
+        */
+       String getOwner ();
+
+       /**
+        * Get a description of the topic, as set by the owner at creation time.
+        * @return
+        */
+       String getDescription ();
+       
+       /**
+        * If the topic is transaction enabled
+        * @return boolean true/false
+        */
+       boolean isTransactionEnabled();
+       
+       /**
+        * Get the ACL for reading on this topic. Can be null.
+        * @return
+        */
+       NsaAcl getReaderAcl ();
+
+       /**
+        * Get the ACL for writing on this topic.  Can be null.
+        * @return
+        */
+       NsaAcl getWriterAcl ();
+
+       /**
+        * Check if this user can read the topic. Throw otherwise. Note that
+        * user may be null.
+        * @param user
+        */
+       void checkUserRead ( NsaApiKey user ) throws AccessDeniedException;
+
+       /**
+        * Check if this user can write to the topic. Throw otherwise. Note
+        * that user may be null.
+        * @param user
+        */
+       void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException;
+
+       /**
+        * allow the given user to publish
+        * @param publisherId
+        * @param asUser
+        */
+       void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+
+       /**
+        * deny the given user from publishing
+        * @param publisherId
+        * @param asUser
+        */
+       void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+
+       /**
+        * allow the given user to read the topic
+        * @param consumerId
+        * @param asUser
+        */
+       void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+
+       /**
+        * deny the given user from reading the topic
+        * @param consumerId
+        * @param asUser
+        * @throws ConfigDbException 
+        */
+       void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java
new file mode 100644 (file)
index 0000000..023bb28
--- /dev/null
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A Cambria batching publisher is a publisher with additional functionality
+ * for managing delayed sends.
+ * 
+ * @author peter
+ *
+ */
+public interface CambriaBatchingPublisher extends CambriaPublisher
+{
+       /**
+        * Get the number of messages that have not yet been sent.
+        * @return the number of pending messages
+        */
+       int getPendingMessageCount ();
+
+       /**
+        * Close this publisher, sending any remaining messages.
+        * @param timeout an amount of time to wait for unsent messages to be sent
+        * @param timeoutUnits the time unit for the timeout arg
+        * @return a list of any unsent messages after the timeout
+        * @throws IOException
+        * @throws InterruptedException 
+        */
+       List<message> close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java
new file mode 100644 (file)
index 0000000..2ce4216
--- /dev/null
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+
+
+//
+import com.att.eelf.configuration.EELFLogger;
+
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public interface CambriaClient {
+       /**
+        * An exception at the Cambria layer. This is used when the HTTP transport
+        * layer returns a success code but the transaction is not completed as
+        * expected.
+        */
+       public class CambriaApiException extends Exception {
+               /**
+                * 
+                * @param msg
+                */
+               public CambriaApiException(String msg) {
+                       super(msg);
+               }
+
+               /**
+                * 
+                * @param msg
+                * @param t
+                */
+               public CambriaApiException(String msg, Throwable t) {
+                       super(msg, t);
+               }
+
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * Optionally set the Logger to use
+        * 
+        * @param log
+        */
+       void logTo(EELFLogger  log);
+
+       /**
+        * Set the API credentials for this client connection. Subsequent calls will
+        *  include authentication headers.who i
+        * 
+        * @param apiKey
+        * @param apiSecret
+        */
+       void setApiCredentials(String apiKey, String apiSecret);
+
+       /**
+        * Remove API credentials, if any, on this connection. Subsequent calls will
+        * not include authentication headers.
+        */
+       void clearApiCredentials();
+
+       /**
+        * Close this connection. Some client interfaces have additional close
+        * capability.
+        */
+       void close();
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java
new file mode 100644 (file)
index 0000000..4d05070
--- /dev/null
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import java.io.IOException;
+
+/**
+ * This interface will provide fetch mechanism for consumer
+ * @author nilanjana.maity
+ *
+ */
+public interface CambriaConsumer extends CambriaClient
+{
+       /**
+        * Fetch a set of messages. The consumer's timeout and message limit are used if set in the constructor call. 
+
+        * @return a set of messages
+        * @throws IOException
+        */
+       Iterable<String> fetch () throws IOException;
+
+       /**
+        * Fetch a set of messages with an explicit timeout and limit for this call. These values
+        * override any set in the constructor call.
+        * 
+        * @param timeoutMs     The amount of time in milliseconds that the server should keep the connection
+        * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side).
+        * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit.
+        * @return a set messages
+        * @throws IOException if there's a problem connecting to the server
+        */
+       Iterable<String> fetch ( int timeoutMs, int limit ) throws IOException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java
new file mode 100644 (file)
index 0000000..441d325
--- /dev/null
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import java.io.IOException;
+import java.util.Collection;
+
+/**
+ * A Cambria publishing interface.
+ * 
+ * @author peter
+ *
+ */
+public interface CambriaPublisher extends CambriaClient {
+       /**
+        * A simple message container
+        */
+       public static class message {
+               /**
+                * 
+                * @param partition
+                * @param msg
+                */
+               public message(String partition, String msg) {
+                       fPartition = partition == null ? "" : partition;
+                       fMsg = msg;
+                       if (fMsg == null) {
+                               throw new IllegalArgumentException("Can't send a null message.");
+                       }
+               }
+
+               /**
+                * 
+                * @param msg
+                */
+               public message(message msg) {
+                       this(msg.fPartition, msg.fMsg);
+               }
+
+               /**
+                *  declaring partition string
+                */
+               public final String fPartition;
+               /**
+                * declaring fMsg String
+                */
+               public final String fMsg;
+       }
+
+       /**
+        * Send the given message using the given partition.
+        * 
+        * @param partition
+        * @param msg
+        * @return the number of pending messages
+        * @throws IOException
+        */
+       int send(String partition, String msg) throws IOException;
+
+       /**
+        * Send the given message using its partition.
+        * 
+        * @param msg
+        * @return the number of pending messages
+        * @throws IOException
+        */
+       int send(message msg) throws IOException;
+
+       /**
+        * Send the given messages using their partitions.
+        * 
+        * @param msgs
+        * @return the number of pending messages
+        * @throws IOException
+        */
+       int send(Collection<message> msgs) throws IOException;
+
+       /**
+        * Close this publisher. It's an error to call send() after close()
+        */
+       void close();
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java
new file mode 100644 (file)
index 0000000..7a8beca
--- /dev/null
@@ -0,0 +1,155 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import org.apache.http.HttpHost;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class CambriaPublisherUtility
+{
+       public static final String kBasePath = "/events/";
+       public static final int kStdCambriaServicePort = 3904;
+/**
+ * 
+ * Translates a string into <code>application/x-www-form-urlencoded</code>
+ * format using a specific encoding scheme.
+ * @param s
+ * @return
+ * 
+ */
+       public static String escape ( String s )
+       {
+               try
+               {
+                       return URLEncoder.encode ( s, "UTF-8");
+               }
+               catch ( UnsupportedEncodingException e )
+               {
+                       throw new RuntimeException ( e );
+               }
+       }
+/**
+ * 
+ * building url
+ * @param rawTopic
+ * @return
+ */
+       public static String makeUrl ( String rawTopic )
+       {
+               final String cleanTopic = escape ( rawTopic );
+               
+               final StringBuffer url = new StringBuffer().
+                       append ( CambriaPublisherUtility.kBasePath ).
+                       append ( cleanTopic );
+               return url.toString ();
+       }
+/**
+ * 
+ * building consumerUrl
+ * @param topic
+ * @param rawConsumerGroup
+ * @param rawConsumerId
+ * @return
+ */
+       public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId )
+       {
+               final String cleanConsumerGroup = escape ( rawConsumerGroup );
+               final String cleanConsumerId = escape ( rawConsumerId );
+               return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId;
+       }
+
+       /**
+        * Create a list of HttpHosts from an input list of strings. Input strings have
+        * host[:port] as format. If the port section is not provided, the default port is used.
+        * 
+        * @param hosts
+        * @return a list of hosts
+        */
+       public static List<HttpHost> createHostsList(Collection<String> hosts)
+       {
+               final ArrayList<HttpHost> convertedHosts = new ArrayList<>();
+               for ( String host : hosts )
+               {
+                       if ( host.length () == 0 ){
+                               continue;
+                       }
+                       
+                       convertedHosts.add ( hostForString ( host ) );
+               }
+               return convertedHosts;
+       }
+
+       /**
+        * Return an HttpHost from an input string. Input string has
+        * host[:port] as format. If the port section is not provided, the default port is used.
+        * 
+        * @param hosts
+        * @return a list of hosts
+        * if host.length<1 throws IllegalArgumentException
+        * 
+        */
+       public static HttpHost hostForString ( String host )
+       {
+               if ( host.length() < 1 ){
+                       throw new IllegalArgumentException ( "An empty host entry is invalid." );
+               }
+               
+               String hostPart = host;
+               int port = kStdCambriaServicePort;
+
+               final int colon = host.indexOf ( ':' );
+               if ( colon == 0 ){
+                       throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid." );
+               }
+               
+               if ( colon > 0 )
+               {
+                       hostPart = host.substring ( 0, colon ).trim();
+
+                       final String portPart = host.substring ( colon + 1 ).trim();
+                       if ( portPart.length () > 0 )
+                       {
+                               try
+                               {
+                                       port = Integer.parseInt ( portPart );
+                               }
+                               catch ( NumberFormatException x )
+                               {
+                                       throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x );
+                               }
+                       }
+                       // else: use default port on "foo:"
+               }
+
+               return new HttpHost ( hostPart, port );
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java
new file mode 100644 (file)
index 0000000..e55692e
--- /dev/null
@@ -0,0 +1,418 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
+import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
+
+import java.net.MalformedURLException;
+import java.nio.channels.NotYetConnectedException;
+import java.util.Collection;
+import java.util.TreeSet;
+import java.util.UUID;
+
+/**
+ * A factory for Cambria clients.<br/>
+ * <br/>
+ * Use caution selecting a consumer creator factory. If the call doesn't accept
+ * a consumer group name, then it creates a consumer that is not restartable.
+ * That is, if you stop your process and start it again, your client will NOT
+ * receive any missed messages on the topic. If you need to ensure receipt of
+ * missed messages, then you must use a consumer that's created with a group
+ * name and ID. (If you create multiple consumer processes using the same group,
+ * load is split across them. Be sure to use a different ID for each instance.)<br/>
+ * <br/>
+ * Publishers
+ * 
+ * @author peter
+ */
+public class DMaaPCambriaClientFactory {
+       /**
+        * Create a consumer instance with the default timeout and no limit on
+        * messages returned. This consumer operates as an independent consumer
+        * (i.e., not in a group) and is NOT re-startable across sessions.
+        * 
+        * @param hostList
+        *            A comma separated list of hosts to use to connect to Cambria.
+        *            You can include port numbers (3904 is the default). For
+        * 
+        * @param topic
+        *            The topic to consume
+        * 
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(String hostList, String topic) {
+               return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
+                               topic);
+       }
+
+       /**
+        * Create a consumer instance with the default timeout and no limit on
+        * messages returned. This consumer operates as an independent consumer
+        * (i.e., not in a group) and is NOT re-startable across sessions.
+        * 
+        * @param hostSet
+        *            The host used in the URL to Cambria. Entries can be
+        *            "host:port".
+        * @param topic
+        *            The topic to consume
+        * 
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(Collection<String> hostSet,
+                       String topic) {
+               return createConsumer(hostSet, topic, null);
+       }
+
+       /**
+        * Create a consumer instance with server-side filtering, the default
+        * timeout, and no limit on messages returned. This consumer operates as an
+        * independent consumer (i.e., not in a group) and is NOT re-startable
+        * across sessions.
+        * 
+        * @param hostSet
+        *            The host used in the URL to Cambria. Entries can be
+        *            "host:port".
+        * @param topic
+        *            The topic to consume
+        * @param filter
+        *            a filter to use on the server side
+        * 
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(Collection<String> hostSet,
+                       String topic, String filter) {
+               return createConsumer(hostSet, topic, UUID.randomUUID().toString(),
+                               "0", -1, -1, filter, null, null);
+       }
+
+       /**
+        * Create a consumer instance with the default timeout, and no limit on
+        * messages returned. This consumer can operate in a logical group and is
+        * re-startable across sessions when you use the same group and ID on
+        * restart.
+        * 
+        * @param hostSet
+        *            The host used in the URL to Cambria. Entries can be
+        *            "host:port".
+        * @param topic
+        *            The topic to consume
+        * @param consumerGroup
+        *            The name of the consumer group this consumer is part of
+        * @param consumerId
+        *            The unique id of this consume in its group
+        * 
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(Collection<String> hostSet,
+                       final String topic, final String consumerGroup,
+                       final String consumerId) {
+               return createConsumer(hostSet, topic, consumerGroup, consumerId, -1, -1);
+       }
+
+       /**
+        * Create a consumer instance with the default timeout, and no limit on
+        * messages returned. This consumer can operate in a logical group and is
+        * re-startable across sessions when you use the same group and ID on
+        * restart.
+        * 
+        * @param hostSet
+        *            The host used in the URL to Cambria. Entries can be
+        *            "host:port".
+        * @param topic
+        *            The topic to consume
+        * @param consumerGroup
+        *            The name of the consumer group this consumer is part of
+        * @param consumerId
+        *            The unique id of this consume in its group
+        * @param timeoutMs
+        *            The amount of time in milliseconds that the server should keep
+        *            the connection open while waiting for message traffic. Use -1
+        *            for default timeout.
+        * @param limit
+        *            A limit on the number of messages returned in a single call.
+        *            Use -1 for no limit.
+        * 
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(Collection<String> hostSet,
+                       final String topic, final String consumerGroup,
+                       final String consumerId, int timeoutMs, int limit) {
+               return createConsumer(hostSet, topic, consumerGroup, consumerId,
+                               timeoutMs, limit, null, null, null);
+       }
+
+       /**
+        * Create a consumer instance with the default timeout, and no limit on
+        * messages returned. This consumer can operate in a logical group and is
+        * re-startable across sessions when you use the same group and ID on
+        * restart. This consumer also uses server-side filtering.
+        * 
+        * @param hostList
+        *            A comma separated list of hosts to use to connect to Cambria.
+        *            You can include port numbers (3904 is the default). For
+        * @param topic
+        *            The topic to consume
+        * @param consumerGroup
+        *            The name of the consumer group this consumer is part of
+        * @param consumerId
+        *            The unique id of this consume in its group
+        * @param timeoutMs
+        *            The amount of time in milliseconds that the server should keep
+        *            the connection open while waiting for message traffic. Use -1
+        *            for default timeout.
+        * @param limit
+        *            A limit on the number of messages returned in a single call.
+        *            Use -1 for no limit.
+        * @param filter
+        *            A Highland Park filter expression using only built-in filter
+        *            components. Use null for "no filter".
+        * @param apiKey
+        *            key associated with a user
+        * @param apiSecret
+        *            of a user
+        * 
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(String hostList,
+                       final String topic, final String consumerGroup,
+                       final String consumerId, int timeoutMs, int limit, String filter,
+                       String apiKey, String apiSecret) {
+               return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
+                               topic, consumerGroup, consumerId, timeoutMs, limit, filter,
+                               apiKey, apiSecret);
+       }
+
+       /**
+        * Create a consumer instance with the default timeout, and no limit on
+        * messages returned. This consumer can operate in a logical group and is
+        * re-startable across sessions when you use the same group and ID on
+        * restart. This consumer also uses server-side filtering.
+        * 
+        * @param hostSet
+        *            The host used in the URL to Cambria. Entries can be
+        *            "host:port".
+        * @param topic
+        *            The topic to consume
+        * @param consumerGroup
+        *            The name of the consumer group this consumer is part of
+        * @param consumerId
+        *            The unique id of this consume in its group
+        * @param timeoutMs
+        *            The amount of time in milliseconds that the server should keep
+        *            the connection open while waiting for message traffic. Use -1
+        *            for default timeout.
+        * @param limit
+        *            A limit on the number of messages returned in a single call.
+        *            Use -1 for no limit.
+        * @param filter
+        *            A Highland Park filter expression using only built-in filter
+        *            components. Use null for "no filter".
+        * @param apiKey
+        *            key associated with a user
+        * @param apiSecret
+        *            of a user
+        * @return a consumer
+        */
+       public static CambriaConsumer createConsumer(Collection<String> hostSet,
+                       final String topic, final String consumerGroup,
+                       final String consumerId, int timeoutMs, int limit, String filter,
+                       String apiKey, String apiSecret) {
+               if (sfMock != null)
+                       return sfMock;
+               try {
+               return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
+                               consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
+       } catch (MalformedURLException e) {
+               
+               NotYetConnectedException exception=new NotYetConnectedException();
+               exception.setStackTrace(e.getStackTrace());
+               
+               throw exception ;
+       }
+       }
+
+       /*************************************************************************/
+       /*************************************************************************/
+       /*************************************************************************/
+
+       /**
+        * Create a publisher that sends each message (or group of messages)
+        * immediately. Most applications should favor higher latency for much
+        * higher message throughput and the "simple publisher" is not a good
+        * choice.
+        * 
+        * @param hostlist
+        *            The host used in the URL to Cambria. Can be "host:port", can
+        *            be multiple comma-separated entries.
+        * @param topic
+        *            The topic on which to publish messages.
+        * @return a publisher
+        */
+       public static CambriaBatchingPublisher createSimplePublisher(
+                       String hostlist, String topic) {
+               return createBatchingPublisher(hostlist, topic, 1, 1);
+       }
+
+       /**
+        * Create a publisher that batches messages. Be sure to close the publisher
+        * to send the last batch and ensure a clean shutdown. Message payloads are
+        * not compressed.
+        * 
+        * @param hostlist
+        *            The host used in the URL to Cambria. Can be "host:port", can
+        *            be multiple comma-separated entries.
+        * @param topic
+        *            The topic on which to publish messages.
+        * @param maxBatchSize
+        *            The largest set of messages to batch
+        * @param maxAgeMs
+        *            The maximum age of a message waiting in a batch
+        * 
+        * @return a publisher
+        */
+       public static CambriaBatchingPublisher createBatchingPublisher(
+                       String hostlist, String topic, int maxBatchSize, long maxAgeMs) {
+               return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs,
+                               false);
+       }
+
+       /**
+        * Create a publisher that batches messages. Be sure to close the publisher
+        * to send the last batch and ensure a clean shutdown.
+        * 
+        * @param hostlist
+        *            The host used in the URL to Cambria. Can be "host:port", can
+        *            be multiple comma-separated entries.
+        * @param topic
+        *            The topic on which to publish messages.
+        * @param maxBatchSize
+        *            The largest set of messages to batch
+        * @param maxAgeMs
+        *            The maximum age of a message waiting in a batch
+        * @param compress
+        *            use gzip compression
+        * 
+        * @return a publisher
+        */
+       public static CambriaBatchingPublisher createBatchingPublisher(
+                       String hostlist, String topic, int maxBatchSize, long maxAgeMs,
+                       boolean compress) {
+               return createBatchingPublisher(
+                               DMaaPCambriaConsumerImpl.stringToList(hostlist), topic,
+                               maxBatchSize, maxAgeMs, compress);
+       }
+
+       /**
+        * Create a publisher that batches messages. Be sure to close the publisher
+        * to send the last batch and ensure a clean shutdown.
+        * 
+        * @param hostSet
+        *            A set of hosts to be used in the URL to Cambria. Can be
+        *            "host:port". Use multiple entries to enable failover.
+        * @param topic
+        *            The topic on which to publish messages.
+        * @param maxBatchSize
+        *            The largest set of messages to batch
+        * @param maxAgeMs
+        *            The maximum age of a message waiting in a batch
+        * @param compress
+        *            use gzip compression
+        * 
+        * @return a publisher
+        */
+       public static CambriaBatchingPublisher createBatchingPublisher(
+                       String[] hostSet, String topic, int maxBatchSize, long maxAgeMs,
+                       boolean compress) {
+               final TreeSet<String> hosts = new TreeSet<String>();
+               for (String hp : hostSet) {
+                       hosts.add(hp);
+               }
+               return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs,
+                               compress);
+       }
+
+       /**
+        * Create a publisher that batches messages. Be sure to close the publisher
+        * to send the last batch and ensure a clean shutdown.
+        * 
+        * @param hostSet
+        *            A set of hosts to be used in the URL to Cambria. Can be
+        *            "host:port". Use multiple entries to enable failover.
+        * @param topic
+        *            The topic on which to publish messages.
+        * @param maxBatchSize
+        *            The largest set of messages to batch
+        * @param maxAgeMs
+        *            The maximum age of a message waiting in a batch
+        * @param compress
+        *            use gzip compression
+        * 
+        * @return a publisher
+        */
+       public static CambriaBatchingPublisher createBatchingPublisher(
+                       Collection<String> hostSet, String topic, int maxBatchSize,
+                       long maxAgeMs, boolean compress) {
+               return new DMaaPCambriaSimplerBatchPublisher.Builder()
+                               .againstUrls(hostSet).onTopic(topic)
+                               .batchTo(maxBatchSize, maxAgeMs).compress(compress).build();
+       }
+
+       /**
+        * Create an identity manager client to work with API keys.
+        * 
+        * @param hostSet
+        *            A set of hosts to be used in the URL to Cambria. Can be
+        *            "host:port". Use multiple entries to enable failover.
+        * @param apiKey
+        *            Your API key
+        * @param apiSecret
+        *            Your API secret
+        * @return an identity manager
+        */
+       
+
+       /**
+        * Create a topic manager for working with topics.
+        * 
+        * @param hostSet
+        *            A set of hosts to be used in the URL to Cambria. Can be
+        *            "host:port". Use multiple entries to enable failover.
+        * @param apiKey
+        *            Your API key
+        * @param apiSecret
+        *            Your API secret
+        * @return a topic manager
+        */
+       
+
+       /**
+        * Inject a consumer. Used to support unit tests.
+        * 
+        * @param cc
+        */
+       public static void $testInject(CambriaConsumer cc) {
+               sfMock = cc;
+       }
+
+       private static CambriaConsumer sfMock = null;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java
new file mode 100644 (file)
index 0000000..9b15528
--- /dev/null
@@ -0,0 +1,99 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.apiClient.http.CacheUse;
+import com.att.nsa.apiClient.http.HttpClient;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class CambriaBaseClient extends HttpClient implements org.onap.dmaap.dmf.mr.metrics.publisher.CambriaClient 
+{
+       protected CambriaBaseClient ( Collection<String> hosts ) throws MalformedURLException
+       {
+               this ( hosts, null );
+       }
+
+       public CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
+       {
+               
+                       
+               
+               super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
+
+               
+               fLog = EELFManager.getInstance().getLogger(this.getClass().getName());
+               
+       }
+
+       @Override
+       public void close ()
+       {
+       }
+
+       public Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
+       {
+               if ( a == null ){
+                       return null;
+               }
+               
+               final TreeSet<String> set = new TreeSet<>();
+               for ( int i=0; i<a.length (); i++ )
+               {
+                       set.add ( a.getString ( i ));
+               }
+               return set;
+       }
+       /**
+        * @param log
+        */
+       public void logTo ( EELFLogger  log )
+       {
+               fLog = log; 
+               
+               
+       }
+
+       public EELFLogger  getLog ()
+       {
+               return fLog;
+       }
+       
+       private EELFLogger  fLog;
+       
+       
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/Clock.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/Clock.java
new file mode 100644 (file)
index 0000000..ac83a9e
--- /dev/null
@@ -0,0 +1,74 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher.impl;
+
+/**
+ * 
+ * This class maintains the system clocks
+ * @author nilanjana.maity
+ *
+ */
+public class Clock
+{
+       public synchronized static Clock getIt ()
+       {
+               if ( sfClock == null )
+               {
+                       sfClock = new Clock ();
+               }
+               return sfClock;
+       }
+
+       /**
+        * 
+        * Get the system's current time in milliseconds.
+        * @return the current time
+        * 
+        */
+       public static long now ()
+       {
+               return getIt().nowImpl ();
+       }
+
+       /**
+        * Get current time in milliseconds
+        * @return current time in ms
+        */
+       public long nowImpl ()
+       {
+               return System.currentTimeMillis ();
+       }
+
+       /**
+        * Initialize constructor
+        */
+       public Clock ()
+       {
+       }
+
+       private static Clock sfClock = null;
+
+       public synchronized static void register ( Clock testClock )
+       {
+               sfClock = testClock;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
new file mode 100644 (file)
index 0000000..75dab74
--- /dev/null
@@ -0,0 +1,167 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher.impl;
+
+import com.att.nsa.apiClient.http.HttpException;
+import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
+import jline.internal.Log;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisherUtility;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URLEncoder;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
+               implements org.onap.dmaap.dmf.mr.metrics.publisher.CambriaConsumer {
+       private final String fTopic;
+       private final String fGroup;
+       private final String fId;
+       private final int fTimeoutMs;
+       private final int fLimit;
+       private final String fFilter;
+
+       /**
+        * 
+        * @param hostPart
+        * @param topic
+        * @param consumerGroup
+        * @param consumerId
+        * @param timeoutMs
+        * @param limit
+        * @param filter
+        * @param apiKey
+        * @param apiSecret
+        */
+       public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
+                       final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
+               super(hostPart, topic + "::" + consumerGroup + "::" + consumerId);
+
+               fTopic = topic;
+               fGroup = consumerGroup;
+               fId = consumerId;
+               fTimeoutMs = timeoutMs;
+               fLimit = limit;
+               fFilter = filter;
+
+               setApiCredentials(apiKey, apiSecret);
+       }
+
+       /**
+        * method converts String to list
+        * 
+        * @param str
+        * @return
+        */
+       public static List<String> stringToList(String str) {
+               final LinkedList<String> set = new LinkedList<String>();
+               if (str != null) {
+                       final String[] parts = str.trim().split(",");
+                       for (String part : parts) {
+                               final String trimmed = part.trim();
+                               if (trimmed.length() > 0) {
+                                       set.add(trimmed);
+                               }
+                       }
+               }
+               return set;
+       }
+
+       @Override
+       public Iterable<String> fetch() throws IOException {
+               // fetch with the timeout and limit set in constructor
+               return fetch(fTimeoutMs, fLimit);
+       }
+
+       @Override
+       public Iterable<String> fetch(int timeoutMs, int limit) throws IOException {
+               final LinkedList<String> msgs = new LinkedList<String>();
+
+               final String urlPath = createUrlPath(timeoutMs, limit);
+
+               getLog().info("UEB GET " + urlPath);
+               try {
+                       final JSONObject o = get(urlPath);
+
+                       if (o != null) {
+                               final JSONArray a = o.getJSONArray("result");
+                               if (a != null) {
+                                       for (int i = 0; i < a.length(); i++) {
+                                               msgs.add(a.getString(i));
+                                       }
+                               }
+                       }
+               } catch (HttpObjectNotFoundException e) {
+                       // this can happen if the topic is not yet created. ignore.
+                       Log.error("Failed due to topic is not yet created" + e);
+               } catch (JSONException e) {
+                       // unexpected response
+                       reportProblemWithResponse();
+                       Log.error("Failed due to jsonException", e);
+               } catch (HttpException e) {
+                       throw new IOException(e);
+               }
+
+               return msgs;
+       }
+
+       public String createUrlPath(int timeoutMs, int limit) {
+               final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
+               final StringBuilder adds = new StringBuilder();
+               if (timeoutMs > -1) {
+                       adds.append("timeout=").append(timeoutMs);
+               }
+
+               if (limit > -1) {
+                       if (adds.length() > 0) {
+                               adds.append("&");
+                       }
+                       adds.append("limit=").append(limit);
+               }
+               if (fFilter != null && fFilter.length() > 0) {
+                       try {
+                               if (adds.length() > 0) {
+                                       adds.append("&");
+                               }
+                               adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8"));
+                       } catch (UnsupportedEncodingException e) {
+                               Log.error("Failed due to UnsupportedEncodingException" + e);
+                       }
+               }
+               if (adds.length() > 0) {
+                       url.append("?").append(adds.toString());
+               }
+               return url.toString();
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
new file mode 100644 (file)
index 0000000..531564a
--- /dev/null
@@ -0,0 +1,422 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher.impl;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisherUtility;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.MalformedURLException;
+import java.nio.channels.NotYetConnectedException;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * 
+ * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages
+ * in batch
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
+               implements org.onap.dmaap.dmf.mr.metrics.publisher.CambriaBatchingPublisher {
+       /**
+        * 
+        * static inner class initializes with urls, topic,batchSize
+        * 
+        * @author anowarul.islam
+        *
+        */
+       public static class Builder {
+               public Builder() {
+               }
+
+               /**
+                * constructor initialize with url
+                * 
+                * @param baseUrls
+                * @return
+                * 
+                */
+               public Builder againstUrls(Collection<String> baseUrls) {
+                       fUrls = baseUrls;
+                       return this;
+               }
+
+               /**
+                * constructor initializes with topics
+                * 
+                * @param topic
+                * @return
+                * 
+                */
+               public Builder onTopic(String topic) {
+                       fTopic = topic;
+                       return this;
+               }
+
+               /**
+                * constructor initilazes with batch size and batch time
+                * 
+                * @param maxBatchSize
+                * @param maxBatchAgeMs
+                * @return
+                * 
+                */
+               public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) {
+                       fMaxBatchSize = maxBatchSize;
+                       fMaxBatchAgeMs = maxBatchAgeMs;
+                       return this;
+               }
+
+               /**
+                * constructor initializes with compress
+                * 
+                * @param compress
+                * @return
+                */
+               public Builder compress(boolean compress) {
+                       fCompress = compress;
+                       return this;
+               }
+
+               /**
+                * method returns DMaaPCambriaSimplerBatchPublisher object
+                * 
+                * @return
+                */
+               public DMaaPCambriaSimplerBatchPublisher build()  {
+                       
+                       try {
+                       return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
+               } catch (MalformedURLException e) {
+                       
+                       NotYetConnectedException exception=new NotYetConnectedException();
+                       exception.setStackTrace(e.getStackTrace());
+                       
+                       throw exception ;
+               
+               }
+               }
+
+               private Collection<String> fUrls;
+               private String fTopic;
+               private int fMaxBatchSize = 100;
+               private long fMaxBatchAgeMs = 1000;
+               private boolean fCompress = false;
+       };
+
+       /**
+        * 
+        * @param partition
+        * @param msg
+        */
+       @Override
+       public int send(String partition, String msg) {
+               return send(new message(partition, msg));
+       }
+
+       /**
+        * @param msg
+        */
+       @Override
+       public int send(message msg) {
+               final LinkedList<message> list = new LinkedList<message>();
+               list.add(msg);
+               return send(list);
+       }
+
+       /**
+        * @param msgs
+        */
+       @Override
+       public synchronized int send(Collection<message> msgs) {
+               if (fClosed) {
+                       throw new IllegalStateException("The publisher was closed.");
+               }
+
+               for (message userMsg : msgs) {
+                       fPending.add(new TimestampedMessage(userMsg));
+               }
+               return getPendingMessageCount();
+       }
+
+       /**
+        * getPending message count
+        */
+       @Override
+       public synchronized int getPendingMessageCount() {
+               return fPending.size();
+       }
+
+       /**
+        * 
+        * @exception InterruptedException
+        * @exception IOException
+        */
+       @Override
+       public void close() {
+               try {
+                       final List<message> remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
+                       if (remains.isEmpty()) {
+                               getLog().warn("Closing publisher with " + remains.size() + " messages unsent. "
+                                               + "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
+                       }
+               } catch (InterruptedException e) {
+                       getLog().warn("Possible message loss. " + e.getMessage(), e);
+                       Thread.currentThread().interrupt();
+               } catch (IOException e) {
+                       getLog().warn("Possible message loss. " + e.getMessage(), e);
+               }
+       }
+
+       /**
+        * @param time
+        * @param unit
+        */
+       @Override
+       public List<message> close(long time, TimeUnit unit) throws IOException, InterruptedException {
+               synchronized (this) {
+                       fClosed = true;
+
+                       // stop the background sender
+                       fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
+                       fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
+                       fExec.shutdown();
+               }
+
+               final long now = Clock.now();
+               final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit);
+               final long timeoutAtMs = now + waitInMs;
+
+               while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) {
+                       send(true);
+                       Thread.sleep(250);
+               }
+               // synchronizing the current object
+               synchronized (this) {
+                       final LinkedList<message> result = new LinkedList<message>();
+                       fPending.drainTo(result);
+                       return result;
+               }
+       }
+
+       /**
+        * Possibly send a batch to the cambria server. This is called by the
+        * background thread and the close() method
+        * 
+        * @param force
+        */
+       private synchronized void send(boolean force) {
+               if (force || shouldSendNow()) {
+                       if (!sendBatch()) {
+                               getLog().warn("Send failed, " + fPending.size() + " message to send.");
+
+                               // note the time for back-off
+                               fDontSendUntilMs = sfWaitAfterError + Clock.now();
+                       }
+               }
+       }
+
+       /**
+        * 
+        * @return
+        */
+       private synchronized boolean shouldSendNow() {
+               boolean shouldSend = false;
+               if (fPending.isEmpty()) {
+                       final long nowMs = Clock.now();
+
+                       shouldSend = (fPending.size() >= fMaxBatchSize);
+                       if (!shouldSend) {
+                               final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs;
+                               shouldSend = sendAtMs <= nowMs;
+                       }
+
+                       // however, wait after an error
+                       shouldSend = shouldSend && nowMs >= fDontSendUntilMs;
+               }
+               return shouldSend;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       private synchronized boolean sendBatch() {
+               // it's possible for this call to be made with an empty list. in this
+               // case, just return.
+               if (fPending.isEmpty()) {
+                       return true;
+               }
+
+               final long nowMs = Clock.now();
+               final String url = CambriaPublisherUtility.makeUrl(fTopic);
+
+               getLog().info("sending " + fPending.size() + " msgs to " + url + ". Oldest: "
+                               + (nowMs - fPending.peek().timestamp) + " ms");
+
+               try {
+
+                       final ByteArrayOutputStream baseStream = new ByteArrayOutputStream();
+                       OutputStream os = baseStream;
+                       if (fCompress) {
+                               os = new GZIPOutputStream(baseStream);
+                       }
+                       for (TimestampedMessage m : fPending) {
+                               os.write(("" + m.fPartition.length()).getBytes());
+                               os.write('.');
+                               os.write(("" + m.fMsg.length()).getBytes());
+                               os.write('.');
+                               os.write(m.fPartition.getBytes());
+                               os.write(m.fMsg.getBytes());
+                               os.write('\n');
+                       }
+                       os.close();
+
+                       final long startMs = Clock.now();
+
+                       // code from REST Client Starts
+
+                       
+                       
+
+                       Client client = ClientBuilder.newClient();
+                       String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+                        if (null==metricTopicname) {
+                                
+                        metricTopicname="msgrtr.apinode.metrics.dmaap";
+                        }
+                       WebTarget target = client
+                                       .target("http://localhost:" + CambriaConstants.kStdCambriaServicePort);
+                       target = target.path("/events/" + fTopic);
+                       getLog().info("url : " + target.getUri().toString());
+                       // API Key
+
+                       Entity<byte[]> data = Entity.entity(baseStream.toByteArray(), "application/cambria");
+
+                       Response response = target.request().post(data);
+                       
+                       getLog().info("Response received :: " + response.getStatus());
+                       getLog().info("Response received :: " + response.toString());
+
+                       // code from REST Client Ends
+
+                       
+                       fPending.clear();
+                       return true;
+               } catch (IllegalArgumentException x) {
+                       getLog().warn(x.getMessage(), x);
+               }
+               
+               catch (IOException x) {
+                       getLog().warn(x.getMessage(), x);
+               }
+               return false;
+       }
+
+       private final String fTopic;
+       private final int fMaxBatchSize;
+       private final long fMaxBatchAgeMs;
+       private final boolean fCompress;
+       private boolean fClosed;
+
+       private final LinkedBlockingQueue<TimestampedMessage> fPending;
+       private long fDontSendUntilMs;
+       private final ScheduledThreadPoolExecutor fExec;
+
+       private static final long sfWaitAfterError = 1000;
+
+       /**
+        * 
+        * @param hosts
+        * @param topic
+        * @param maxBatchSize
+        * @param maxBatchAgeMs
+        * @param compress
+        */
+       private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
+                       long maxBatchAgeMs, boolean compress) throws MalformedURLException {
+
+               super(hosts);
+
+               if (topic == null || topic.length() < 1) {
+                       throw new IllegalArgumentException("A topic must be provided.");
+               }
+
+               fClosed = false;
+               fTopic = topic;
+               fMaxBatchSize = maxBatchSize;
+               fMaxBatchAgeMs = maxBatchAgeMs;
+               fCompress = compress;
+
+               fPending = new LinkedBlockingQueue<TimestampedMessage>();
+               fDontSendUntilMs = 0;
+
+               fExec = new ScheduledThreadPoolExecutor(1);
+               fExec.scheduleAtFixedRate(new Runnable() {
+                       @Override
+                       public void run() {
+                               send(false);
+                       }
+               }, 100, 50, TimeUnit.MILLISECONDS);
+       }
+
+       /**
+        * 
+        * 
+        * @author anowarul.islam
+        *
+        */
+       private static class TimestampedMessage extends message {
+               /**
+                * to store timestamp value
+                */
+               public final long timestamp;
+
+               /**
+                * constructor initialize with message
+                * 
+                * @param m
+                * 
+                */
+               public TimestampedMessage(message m) {
+                       super(m);
+                       timestamp = Clock.now();
+               }
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java
new file mode 100644 (file)
index 0000000..cd55d2f
--- /dev/null
@@ -0,0 +1,113 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.resources;
+
+import com.att.nsa.apiServer.streams.ChunkedInputStream;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaJsonStreamReader;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaRawStreamReader;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaStreamReader;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaTextStreamReader;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.GZIPInputStream;
+
+/**
+ * An inbound event set.
+ * 
+ * @author peter
+ */
+public class CambriaEventSet {
+       private final reader fReader;
+
+       /**
+        * constructor initialization
+        * 
+        * @param mediaType
+        * @param originalStream
+        * @param chunked
+        * @param defPartition
+        * @throws CambriaApiException
+        */
+       public CambriaEventSet(String mediaType, InputStream originalStream,
+                       boolean chunked, String defPartition) throws CambriaApiException {
+               InputStream is = originalStream;
+               if (chunked) {
+                       is = new ChunkedInputStream(originalStream);
+               }
+
+               if (("application/json").equals(mediaType)) {
+                       if (chunked) {
+                               throw new CambriaApiException(
+                                               HttpServletResponse.SC_BAD_REQUEST,
+                                               "The JSON stream reader doesn't support chunking.");
+                       }
+                       fReader = new CambriaJsonStreamReader(is, defPartition);
+               } else if (("application/cambria").equals(mediaType)) {
+                       fReader = new CambriaStreamReader(is);
+               } else if (("application/cambria-zip").equals(mediaType)) {
+                       try {
+                               is = new GZIPInputStream(is);
+                       } catch (IOException e) {
+                               throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
+                                               "Couldn't read compressed format: " + e);
+                       }
+                       fReader = new CambriaStreamReader(is);
+               } else if (("text/plain").equals(mediaType)) {
+                       fReader = new CambriaTextStreamReader(is, defPartition);
+               } else {
+                       fReader = new CambriaRawStreamReader(is, defPartition);
+               }
+       }
+
+       /**
+        * Get the next message from this event set. Returns null when the end of
+        * stream is reached. Will block until a message arrives (or the stream is
+        * closed/broken).
+        * 
+        * @return a message, or null
+        * @throws IOException
+        * @throws CambriaApiException
+        */
+       public message next() throws IOException, CambriaApiException {
+               return fReader.next();
+       }
+
+       /**
+        * 
+        * @author anowarul.islam
+        *
+        */
+       public interface reader {
+               /**
+                * 
+                * @return
+                * @throws IOException
+                * @throws CambriaApiException
+                */
+               message next() throws IOException, CambriaApiException;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java
new file mode 100644 (file)
index 0000000..ae03c3c
--- /dev/null
@@ -0,0 +1,568 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.resources;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Date;
+
+
+/**
+ * class used to write the consumed messages
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class CambriaOutboundEventStream implements StreamWriter {
+       private static final int kTopLimit = 1024 * 4;
+
+       /**
+        * 
+        * static innerclass it takes all the input parameter for kafka consumer
+        * like limit, timeout, meta, pretty
+        * 
+        * @author anowarul.islam
+        *
+        */
+       public static class Builder {
+
+               // Required
+               private final Consumer fConsumer;
+               // private final rrNvReadable fSettings; // used during write to tweak
+               // format, decide to explicitly
+               // close stream or not
+
+               // Optional
+               private int fLimit;
+               private int fTimeoutMs;
+               private String fTopicFilter;
+               private boolean fPretty;
+               private boolean fWithMeta;
+               ArrayList<Consumer> fKafkaConsumerList;
+
+               
+               /**
+                * constructor it initializes all the consumer parameters
+                * 
+                * @param c
+                * @param settings
+                */
+               public Builder(Consumer c) {
+                       this.fConsumer = c;
+                       
+
+                       fLimit = CambriaConstants.kNoTimeout;
+                       fTimeoutMs = CambriaConstants.kNoLimit;
+                       fTopicFilter = CambriaConstants.kNoFilter;
+                       fPretty = false;
+                       fWithMeta = false;
+                       
+       
+               }
+
+               /**
+                * 
+                * constructor initializes with limit
+                * 
+                * @param l
+                *            only l no of messages will be consumed
+                * @return
+                */
+               public Builder limit(int l) {
+                       this.fLimit = l;
+                       return this;
+               }
+
+               /**
+                * constructor initializes with timeout
+                * 
+                * @param t
+                *            if there is no message to consume, them DMaaP will wait
+                *            for t time
+                * @return
+                */
+               public Builder timeout(int t) {
+                       this.fTimeoutMs = t;
+                       return this;
+               }
+
+               /**
+                * constructor initializes with filter
+                * 
+                * @param f
+                *            filter
+                * @return
+                */
+               public Builder filter(String f) {
+                       this.fTopicFilter = f;
+                       return this;
+               }
+
+               /**
+                * constructor initializes with boolean value pretty
+                * 
+                * @param p
+                *            messages print in new line
+                * @return
+                */
+               public Builder pretty(boolean p) {
+                       fPretty = p;
+                       return this;
+               }
+
+               /**
+                * constructor initializes with boolean value meta
+                * 
+                * @param withMeta,
+                *            along with messages offset will print
+                * @return
+                */
+               public Builder withMeta(boolean withMeta) {
+                       fWithMeta = withMeta;
+                       return this;
+               }
+
+               // public Builder atOffset ( int pos )
+               
+       
+               
+               // }
+               /**
+                * method returs object of CambriaOutboundEventStream
+                * 
+                * @return
+                * @throws CambriaApiException
+                */
+               public CambriaOutboundEventStream build() throws CambriaApiException {
+                       return new CambriaOutboundEventStream(this);
+               }
+       }
+
+       @SuppressWarnings("unchecked")
+       /**
+        * 
+        * @param builder
+        * @throws CambriaApiException
+        * 
+        */
+       private CambriaOutboundEventStream(Builder builder) throws CambriaApiException {
+               fConsumer = builder.fConsumer;
+               fLimit = builder.fLimit;
+               fTimeoutMs = builder.fTimeoutMs;
+               
+               fSent = 0;
+               fPretty = builder.fPretty;
+               fWithMeta = builder.fWithMeta;
+               fKafkaConsumerList = builder.fKafkaConsumerList;
+       
+                       
+                       
+               
+                       
+                               
+                               
+                               
+                               
+                               
+                       
+                               
+                       
+                                               
+                       
+                               
+                               
+                                       
+               
+       
+       }
+
+       /**
+        * 
+        * interface provides onWait and onMessage methods
+        *
+        */
+       public interface operation {
+               /**
+                * Call thread.sleep
+                * 
+                * @throws IOException
+                */
+               void onWait() throws IOException;
+
+               /**
+                * provides the output based in the consumer paramter
+                * 
+                * @param count
+                * @param msg
+                * @throws IOException
+                */
+               
+               void onMessage(int count, String msg, String transId, long offSet) throws IOException, JSONException;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public int getSentCount() {
+               return fSent;
+       }
+
+       @Override
+       /**
+        * 
+        * @param os
+        *            throws IOException
+        */
+       public void write(final OutputStream os) throws IOException {
+               
+       
+               
+               // synchronized(this){
+               os.write('[');
+               fSent = forEachMessage(new operation() {
+                       @Override
+                       public void onMessage(int count, String msg, String transId, long offSet)
+                                       throws IOException, JSONException {
+
+                               if (count > 0) {
+                                       os.write(',');
+                               }
+                               if (fWithMeta) {
+                                       final JSONObject entry = new JSONObject();
+                                       entry.put("offset", offSet);
+                                       entry.put("message", msg);
+                                       os.write(entry.toString().getBytes());
+                               } else {
+                                       
+                                               String jsonString = JSONObject.valueToString(msg);
+                                       os.write(jsonString.getBytes());
+                               }
+
+                               if (fPretty) {
+                                       os.write('\n');
+                               }
+
+                               String metricTopicname = AJSCPropertiesMap
+                                               .getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
+                               if (null == metricTopicname)
+                                       metricTopicname = "msgrtr.apinode.metrics.dmaap";
+                               if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
+                                       try {
+                                               if (istransEnable && istransType) {
+                                                       // final String transactionId =
+                                                       
+                                                       
+                                                       StringBuilder consumerInfo = new StringBuilder();
+                                                       if (null != dmaapContext && null != dmaapContext.getRequest()) {
+                                                               final HttpServletRequest request = dmaapContext.getRequest();
+                                                               consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
+                                                               consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
+                                                               consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
+                                                               consumerInfo.append("consumerGroup= \""
+                                                                               + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
+                                                               consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
+                                                       }
+                                                       log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transId
+                                                                       + "\",messageLength= \"" + msg.length() + "\",topic= \"" + topic.getName() + "\"]");
+                                               }
+                                       } catch (Exception e) {
+                                       }
+                               }
+
+                       }
+
+                       @Override
+                       /**
+                        * 
+                        * It makes thread to wait
+                        * 
+                        * @throws IOException
+                        */
+                       public void onWait() throws IOException {
+                               os.flush(); // likely totally unnecessary for a network socket
+                               try {
+                                       // FIXME: would be good to wait/signal
+                                       Thread.sleep(100);
+                               } catch (InterruptedException e) {
+                                   Thread.currentThread().interrupt();
+                               }
+                       }
+               });
+
+               
+               if (null != dmaapContext && istransEnable && istransType) {
+
+                       dmaapContext.getResponse().setHeader("transactionId",
+                                       Utils.getResponseTransactionId(responseTransactionId));
+               }
+
+               os.write(']');
+               os.flush();
+
+               boolean close_out_stream = true;
+               String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "close.output.stream");
+               if (null != strclose_out_stream)
+                       close_out_stream = Boolean.parseBoolean(strclose_out_stream);
+
+               
+               if (close_out_stream) {
+                       os.close();
+                       
+               }
+       }
+
+       /**
+        * 
+        * @param requestURI
+        * @return
+        */
+       private String getConsumerGroupFromRequest(String requestURI) {
+               if (null != requestURI && !requestURI.isEmpty()) {
+
+                       String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7);
+
+                       int startIndex = consumerDetails.indexOf("/") + 1;
+                       int endIndex = consumerDetails.lastIndexOf("/");
+                       return consumerDetails.substring(startIndex, endIndex);
+               }
+               return null;
+       }
+
+       /**
+        * 
+        * @param op
+        * @return
+        * @throws IOException
+        * @throws JSONException
+        */
+       public int forEachMessage(operation op) throws IOException, JSONException {
+               final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);
+
+               int count = 0;
+               boolean firstPing = true;
+               // boolean isTransType=false;
+               final long startMs = System.currentTimeMillis();
+               final long timeoutMs = fTimeoutMs + startMs -500; //500 ms used in poll 
+
+               while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
+                       if (!firstPing) {
+                               op.onWait();
+                       }
+                       firstPing = false;
+
+               
+                                Consumer.Message msgRecord = null;
+                                while (count < effectiveLimit && (msgRecord =
+                                fConsumer.nextMessage()) != null) {
+
+                               String message = "";
+                               String transactionid = "";
+                               try {
+                   // String msgRecord = msg;
+                                       JSONObject jsonMessage = new JSONObject(msgRecord);
+                                       String[] keys = JSONObject.getNames(jsonMessage);
+                                       boolean wrapheader1 = false;
+                                       boolean wrapheader2 = false;
+                                       boolean found_attr3 = false;
+                                       String wrapElement1 = "message";
+                                       String wrapElement2 = "msgWrapMR";
+                                       String transIdElement = "transactionId";
+                                       if (null != keys) {
+                                               for (String key : keys) {
+                                                       if (key.equals(wrapElement1)) {
+                                                               wrapheader1 = true;
+                                                       } else if (key.equals(wrapElement2)) {
+                                                               wrapheader2 = true;
+                                                       } else if (key.equals(transIdElement)) {
+                                                               found_attr3 = true;
+                                                               transactionid = jsonMessage.getString(key);
+                                                       }
+                                               }
+                                       }
+
+                                       // returns contents of attribute 1 if both attributes
+                                       // present, otherwise
+                                       // the whole msg
+                                       if (wrapheader2 && found_attr3) {
+                                               message = jsonMessage.getString(wrapElement2);
+                                       } else if (wrapheader1 && found_attr3) {
+                                               message = jsonMessage.getString(wrapElement1);
+                                       } else {
+                                               message = msgRecord.getMessage();
+                                       }
+                                       // jsonMessage = extractMessage(jsonMessage ,
+                                       // "message","msgWrapMR","transactionId");
+                                       istransType = true;
+                               } catch (JSONException e) { // This check is required for the
+                                                                                       // message sent by MR AAF flow but
+                                                                                       // consumed by UEB ACL flow which
+                                                                                       // wont expect transaction id in
+                                                                                       // cambria client api
+                                       // Ignore
+                                       log.info("JSON Exception logged when the message is non JSON Format");
+                               } catch (Exception exp) {
+                                       log.info("****Some Exception occured for writing messages in topic" + topic.getName()
+                                                       + "  Exception" + exp);
+                               }
+                               if (message == null || message.equals("")) {
+                                       istransType = false;
+                                       message = msgRecord.getMessage();
+                               }
+
+                               // If filters are enabled/set, message should be in JSON format
+                               // for filters to work for
+                               // otherwise filter will automatically ignore message in
+                               // non-json format.
+                               if (filterMatches(message)) {
+                                       op.onMessage(count, message, transactionid, msgRecord.getOffset());
+                                       count++;
+
+                               }
+
+                       }
+               }
+               return count;
+       }
+
+       
+
+       /**
+        * 
+        * Checks whether filter is initialized
+        */
+       
+               
+       
+
+       /**
+        * 
+        * @param msg
+        * @return
+        */
+       private boolean filterMatches(String msg) {
+               boolean result = true;
+               
+               
+                               
+                               
+                       
+                       
+                       
+                               
+                       
+                               
+               
+       
+
+               return result;
+       }
+
+       public DMaaPContext getDmaapContext() {
+               return dmaapContext;
+       }
+
+       public void setDmaapContext(DMaaPContext dmaapContext) {
+               this.dmaapContext = dmaapContext;
+       }
+
+       public Topic getTopic() {
+               return topic;
+       }
+
+       public void setTopic(Topic topic) {
+               this.topic = topic;
+       }
+
+       public void setTopicStyle(boolean aaftopic) {
+               this.isAAFTopic = aaftopic;
+       }
+
+       public void setTransEnabled(boolean transEnable) {
+               this.istransEnable = transEnable;
+       }
+
+       
+       private final Consumer fConsumer;
+       private final int fLimit;
+       private final int fTimeoutMs;
+       
+       private final boolean fPretty;
+       private final boolean fWithMeta;
+       private int fSent;
+
+       
+       private DMaaPContext dmaapContext;
+       private String responseTransactionId;
+       private Topic topic;
+       private boolean isAAFTopic = false;
+       private boolean istransEnable = false;
+       private ArrayList<Consumer> fKafkaConsumerList;
+       private boolean istransType = true;
+       // private static final Logger log =
+
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
+
+       public int getfLimit() {
+               return fLimit;
+       }
+
+       public int getfTimeoutMs() {
+               return fTimeoutMs;
+       }
+
+       public boolean isfPretty() {
+               return fPretty;
+       }
+
+       public boolean isfWithMeta() {
+               return fWithMeta;
+       }
+
+       public boolean isAAFTopic() {
+               return isAAFTopic;
+       }
+
+       public boolean isIstransEnable() {
+               return istransEnable;
+       }
+
+       public boolean isIstransType() {
+               return istransType;
+       }
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java
new file mode 100644 (file)
index 0000000..8278d54
--- /dev/null
@@ -0,0 +1,167 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.resources.streamReaders;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.InputStream;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public class CambriaJsonStreamReader implements reader {
+       private final JSONTokener fTokens;
+       private final boolean fIsList;
+       private long fCount;
+       private final String fDefPart;
+       public static final String kKeyField = "cambria.partition";
+
+       /**
+        * 
+        * @param is
+        * @param defPart
+        * @throws CambriaApiException
+        */
+       public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException {
+               try {
+                       fTokens = new JSONTokener(is);
+                       fCount = 0;
+                       fDefPart = defPart;
+
+                       final int c = fTokens.next();
+                       if (c == '[') {
+                               fIsList = true;
+                       } else if (c == '{') {
+                               fTokens.back();
+                               fIsList = false;
+                       } else {
+                               throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
+                       }
+               } catch (JSONException e) {
+                       throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
+               }
+       }
+
+       @Override
+       public message next() throws CambriaApiException {
+               try {
+                       if (!fTokens.more()) {
+                               return null;
+                       }
+
+                       final int c = fTokens.next();
+                       
+                       
+                       if (fIsList) {
+                               if (c == ']' || (fCount > 0 && c == 10))
+                                       return null;
+
+
+                               if (fCount > 0 && c != ',' && c!= 10) {
+                                       throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+                                                       "Expected ',' or closing ']' after last object.");
+                               }
+
+                               if (fCount == 0 && c != '{' && c!= 10  && c!=32) {
+                                       throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object.");
+                               }
+                       } else if (fCount != 0 || c != '{') {
+                               throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object.");
+                       }
+
+                       if (c == '{') {
+                               fTokens.back();
+                       }
+                       final JSONObject o = new JSONObject(fTokens);
+                       fCount++;
+                       return new msg(o);
+               } catch (JSONException e) {
+                       throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
+
+               }
+       }
+
+       private class msg implements message {
+               private final String fKey;
+               private  String fMsg;
+               private LogDetails logDetails;
+               private boolean transactionEnabled;
+
+               /**
+                * constructor
+                * 
+                * @param o
+                */
+               
+               
+               
+               public msg(JSONObject o) {
+                       String key = o.optString(kKeyField, fDefPart);
+                       if (key == null) {
+                               key = "" + System.currentTimeMillis();
+                       }
+                       fKey = key;
+                                       
+                               fMsg = o.toString().trim();
+                       
+               }
+
+               @Override
+               public String getKey() {
+                       return fKey;
+               }
+
+               @Override
+               public String getMessage() {
+                       return fMsg;
+               }
+
+               @Override
+               public boolean isTransactionEnabled() {
+                       return transactionEnabled;
+               }
+
+               @Override
+               public void setTransactionEnabled(boolean transactionEnabled) {
+                       this.transactionEnabled = transactionEnabled;
+               }
+
+               @Override
+               public void setLogDetails(LogDetails logDetails) {
+                       this.logDetails = logDetails;
+               }
+
+               @Override
+               public LogDetails getLogDetails() {
+                       return logDetails;
+               }
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java
new file mode 100644 (file)
index 0000000..9f03f27
--- /dev/null
@@ -0,0 +1,142 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.resources.streamReaders;
+
+import com.att.nsa.util.StreamTools;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * 
+ * This stream reader reads raw bytes creating a single message.
+ * @author peter
+ *
+ */
+public class CambriaRawStreamReader implements reader
+{
+       /**
+        * This is the constructor of CambriaRawStreamReader, it will basically the read from Input stream
+        * @param is
+        * @param defPart
+        * @throws CambriaApiException
+        */
+       public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException
+       {
+               fStream = is;
+               fDefPart = defPart;
+               fClosed = false;
+       }
+
+       @Override
+       /**
+        * 
+        * next() method reads the bytes and
+        * iterates through the messages 
+        * @throws CambriaApiException
+        * 
+        */
+       public message next () throws CambriaApiException
+       {
+               if ( fClosed ){
+                       return null;
+               }
+               
+               try
+               {
+                       final byte[] rawBytes = StreamTools.readBytes ( fStream );
+                       fClosed = true;
+                       return new message ()
+                       {
+                               private LogDetails logDetails;
+                               private boolean transactionEnabled;
+
+                               /**
+                                * returns boolean value which 
+                                * indicates whether transaction is enabled
+                                */
+                               public boolean isTransactionEnabled() {
+                                       return transactionEnabled;
+                               }
+
+                               /**
+                                * sets boolean value which 
+                                * indicates whether transaction is enabled
+                                */
+                               public void setTransactionEnabled(boolean transactionEnabled) {
+                                       this.transactionEnabled = transactionEnabled;
+                               }
+                               
+                               @Override
+                               /**
+                                * @returns key
+                                * It ch4ecks whether fDefPart value is Null.
+                                * If yes, it will return ystem.currentTimeMillis () else
+                                * it will return fDefPart variable value
+                                */
+                               public String getKey ()
+                               {
+                                       return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
+                               }
+
+                               @Override
+                               /**
+                                * returns the message in String type object
+                                */
+                               public String getMessage ()
+                               {
+                                       return new String ( rawBytes );
+                               }
+
+                               /**
+                                * set log details in logDetails variable
+                                */
+                               @Override
+                               public void setLogDetails(LogDetails logDetails) {
+                                       this.logDetails = logDetails;
+                               }
+
+                               @Override
+                               /**
+                                * get the log details
+                                */
+                               public LogDetails getLogDetails() {
+                                       return this.logDetails;
+                               }
+                       };
+               }
+               catch ( IOException e )
+               {
+                       throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
+               }
+       }
+       
+       private final InputStream fStream;
+       private final String fDefPart;
+       private boolean fClosed;
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java
new file mode 100644 (file)
index 0000000..d786804
--- /dev/null
@@ -0,0 +1,228 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.resources.streamReaders;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Read an optionally chunked stream in the Cambria app format. This format
+ * allows for speedier server-side message parsing than pure JSON. It's looks
+ * like:<br/>
+ * <br/>
+ * &lt;keyLength&gt;.&lt;msgLength&gt;.&lt;key&gt;&lt;message&gt;<br/>
+ * <br/>
+ * Whitespace before/after each entry is ignored, so messages can be delivered
+ * with newlines between them, or not.
+ * 
+ * @author peter
+ *
+ */
+public class CambriaStreamReader implements reader {
+       /**
+        * constructor initializing InputStream with fStream
+        * 
+        * @param senderStream
+        * @throws CambriaApiException
+        */
+       public CambriaStreamReader(InputStream senderStream) throws CambriaApiException {
+               fStream = senderStream;
+       }
+
+       @Override
+       /**
+        * next method iterates through msg length
+        * throws IOException
+        * throws CambriaApiException
+        * 
+        */
+       public message next() throws IOException, CambriaApiException {
+               final int keyLen = readLength();
+               if (keyLen == -1)
+                       return null;
+
+               final int msgLen = readLength();
+               final String keyPart = readString(keyLen);
+               final String msgPart = readString(msgLen);
+
+               return new msg(keyPart, msgPart);
+       }
+
+       private static class msg implements message {
+               /**
+                * constructor initialization
+                * 
+                * @param key
+                * @param msg
+                */
+               public msg(String key, String msg) {
+                       // if no key, use the current time. This allows the message to be
+                       // delivered
+                       // in any order without forcing it into a single partition as empty
+                       // string would.
+                       if (key.length() < 1) {
+                               key = "" + System.currentTimeMillis();
+                       }
+
+                       fKey = key;
+                       fMsg = msg;
+               }
+
+               @Override
+               /**
+                * @returns fkey
+                */
+               public String getKey() {
+                       return fKey;
+               }
+
+               @Override
+               /**
+                * returns the message in String type object
+                */
+               public String getMessage() {
+                       return fMsg;
+               }
+
+               private final String fKey;
+               private final String fMsg;
+               private LogDetails logDetails;
+               private boolean transactionEnabled;
+               
+               /**
+                * returns boolean value which 
+                * indicates whether transaction is enabled
+                */
+               public boolean isTransactionEnabled() {
+                       return transactionEnabled;
+               }
+               
+               /**
+                * sets boolean value which 
+                * indicates whether transaction is enabled
+                */
+               public void setTransactionEnabled(boolean transactionEnabled) {
+                       this.transactionEnabled = transactionEnabled;
+               }
+
+               @Override
+               /**
+                * set log details in logDetails variable
+                */
+               public void setLogDetails(LogDetails logDetails) {
+                       this.logDetails = logDetails;
+               }
+
+               @Override
+               /**
+                * get the log details
+                */
+               public LogDetails getLogDetails() {
+                       return this.logDetails;
+               }
+
+       }
+
+       private final InputStream fStream;
+
+       /**
+        * max cambria length indicates message length
+        
+       // This limit is here to prevent the server from spinning on a long string of numbers
+    // that is delivered with 'application/cambria' as the format. The limit needs to be
+    // large enough to support the max message length (currently 1MB, the default Kafka
+    // limit)
+    * */
+     
+    private static final int kMaxCambriaLength = 4*1000*1024;
+
+
+       /**
+        * 
+        * @return
+        * @throws IOException
+        * @throws CambriaApiException
+        */
+       private int readLength() throws IOException, CambriaApiException {
+               // always ignore leading whitespace
+               int c = fStream.read();
+               while (Character.isWhitespace(c)) {
+                       c = fStream.read();
+               }
+
+               if (c == -1) {
+                       return -1;
+               }
+
+               int result = 0;
+               while (Character.isDigit(c)) {
+                       result = (result * 10) + (c - '0');
+                       if (result > kMaxCambriaLength) {
+                               throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
+                       }
+                       c = fStream.read();
+               }
+
+               if (c != '.') {
+                       throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
+               }
+
+               return result;
+       }
+
+       /**
+        * 
+        * @param len
+        * @return
+        * @throws IOException
+        * @throws CambriaApiException
+        */
+       private String readString(int len) throws IOException, CambriaApiException {
+               final byte[] buffer = new byte[len];
+
+               final long startMs = System.currentTimeMillis();
+               final long timeoutMs = startMs + 30000; // FIXME configurable
+
+               int readTotal = 0;
+               while (readTotal < len) {
+                       final int read = fStream.read(buffer, readTotal, len - readTotal);
+                       if (read == -1 || System.currentTimeMillis() > timeoutMs) {
+                               // EOF
+                               break;
+                       }
+                       readTotal += read;
+               }
+
+               if (readTotal < len) {
+                       throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+                                       "End of stream while reading " + len + " bytes");
+               }
+
+               return new String(buffer);
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java
new file mode 100644 (file)
index 0000000..40e1639
--- /dev/null
@@ -0,0 +1,141 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.resources.streamReaders;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+/**
+ * This stream reader just pulls single lines. It uses the default partition if provided. If
+ * not, the key is the current time, which does not guarantee ordering.
+ * 
+ * @author peter
+ *
+ */
+public class CambriaTextStreamReader implements reader
+{
+       /**
+        * This is the constructor for Cambria Text Reader format
+        * @param is
+        * @param defPart
+        * @throws CambriaApiException
+        */
+       public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException
+       {
+               fReader = new BufferedReader ( new InputStreamReader ( is ) );
+               fDefPart = defPart;
+       }
+
+       @Override
+       /**
+        * next() method iterates through msg length
+        * throws IOException
+        * throws CambriaApiException
+        * 
+        */ 
+       public message next () throws CambriaApiException
+       {
+               try
+               {
+                       final String line = fReader.readLine ();
+                       if ( line == null ) {
+                               return null;
+                       }
+                       
+                       return new message ()
+                       {
+                               private LogDetails logDetails;
+                               private boolean transactionEnabled;
+
+                               /**
+                                * returns boolean value which 
+                                * indicates whether transaction is enabled
+                                * @return
+                                */
+                               public boolean isTransactionEnabled() {
+                                       return transactionEnabled;
+                               }
+
+                               /**
+                                * sets boolean value which 
+                                * indicates whether transaction is enabled
+                                */
+                               public void setTransactionEnabled(boolean transactionEnabled) {
+                                       this.transactionEnabled = transactionEnabled;
+                               }
+                               
+                               @Override
+                               /**
+                                * @returns key
+                                * It ch4ecks whether fDefPart value is Null.
+                                * If yes, it will return ystem.currentTimeMillis () else
+                                * it will return fDefPart variable value
+                                */
+                               public String getKey ()
+                               {
+                                       return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
+                               }
+
+                               @Override
+                               /**
+                                * returns the message in String type object
+                                * @return
+                                */
+                               public String getMessage ()
+                               {
+                                       return line;
+                               }
+
+                               @Override
+                               /**
+                                * set log details in logDetails variable
+                                */
+                               public void setLogDetails(LogDetails logDetails) {
+                                       this.logDetails = logDetails;
+                               }
+
+                               @Override
+                               /**
+                                * get the log details
+                                */
+                               public LogDetails getLogDetails() {
+                                       return this.logDetails;
+                               }
+                       };
+               }
+               catch ( IOException e )
+               {
+                       throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
+               }
+       }
+       
+       private final BufferedReader fReader;
+       private final String fDefPart;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java
new file mode 100644 (file)
index 0000000..9b3a602
--- /dev/null
@@ -0,0 +1,36 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+
+import javax.servlet.http.HttpServletRequest;
+
+/**
+ * 
+ * @author sneha.d.desai
+ *
+ */
+public interface DMaaPAAFAuthenticator {
+       boolean aafAuthentication( HttpServletRequest req , String role);
+       String aafPermissionString(String permission, String action) throws CambriaApiException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java
new file mode 100644 (file)
index 0000000..917a74d
--- /dev/null
@@ -0,0 +1,77 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+
+import javax.servlet.http.HttpServletRequest;
+
+/**
+ * 
+ * @author sneha.d.desai
+ *
+ */
+public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
+
+       private static final String NAMESPACE_PROPERTY = "defaultNSforUEB";
+       private static final String DEFAULT_NAMESPACE = "org.onap.dmaap.mr";
+       private static final String NAMESPACE_PREFIX = "org.onap";
+       private static final String NAMESPACE_PREFIX_VAR = "namespacePrefix";
+       private static final String DEFAULT_NAMESPACE_VAR = "defaultNamespace";
+       private static final String INSTANCE_PART_VAR = "pubSubInstPart";
+
+       /**
+        * @param req
+        * @param role
+        */
+       @Override
+       public boolean aafAuthentication(HttpServletRequest req, String role) {
+               return req.isUserInRole(role);
+       }
+
+       @Override
+       public String aafPermissionString(String topicName, String action) throws CambriaApiException {
+
+               String nameSpace = topicName.startsWith(
+                               System.getenv(NAMESPACE_PREFIX_VAR) != null ? System.getenv(NAMESPACE_PREFIX_VAR) : NAMESPACE_PREFIX)
+                                               ? parseNamespace(topicName) : readNamespaceFromProperties();
+
+               nameSpace = !nameSpace.isEmpty() ? nameSpace
+                               : (System.getenv(DEFAULT_NAMESPACE_VAR) != null ? System.getenv(DEFAULT_NAMESPACE_VAR)
+                                               : DEFAULT_NAMESPACE);
+
+               return new StringBuilder(nameSpace).append(
+                               (System.getenv(INSTANCE_PART_VAR) != null ? System.getenv(INSTANCE_PART_VAR) : ".topic") + "|:topic.")
+                               .append(topicName).append("|").append(action).toString();
+       }
+
+       String readNamespaceFromProperties() {
+               return AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, NAMESPACE_PROPERTY);
+       }
+
+       private String parseNamespace(String topicName) {
+               return topicName.substring(0, topicName.lastIndexOf('.'));
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java
new file mode 100644 (file)
index 0000000..3b15c55
--- /dev/null
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security;
+
+import com.att.nsa.security.NsaApiKey;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import javax.servlet.http.HttpServletRequest;
+
+
+/**
+ * An interface for authenticating an inbound request.
+ * @author nilanjana.maity
+ *
+ * @param <K> NsaApiKey
+ */
+public interface DMaaPAuthenticator<K extends NsaApiKey> {
+
+       /**
+        * Qualify a request as possibly using the authentication method that this class implements.
+        * @param req
+        * @return true if the request might be authenticated by this class
+        */
+       boolean qualify ( HttpServletRequest req );
+       
+       /**
+        * Check for a request being authentic. If it is, return the API key. If not, return null.
+        * @param req An inbound web request
+        * @return the API key for an authentic request, or null
+        */
+       K isAuthentic ( HttpServletRequest req );
+       /**
+        * Check for a ctx being authenticate. If it is, return the API key. If not, return null.
+        * @param ctx
+        * @return the API key for an authentication request, or null
+        */
+       K authenticate ( DMaaPContext ctx );
+       
+       
+       void addAuthenticator(DMaaPAuthenticator<K> a);
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java
new file mode 100644 (file)
index 0000000..0fd1a5c
--- /dev/null
@@ -0,0 +1,132 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security;
+
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.LinkedList;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ * @param <K>
+ */
+public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+
+       private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
+       
+
+
+       // Setting timeout to a large value for testing purpose.
+       
+       // 10 minutes
+       private static final long kDefaultRequestTimeWindow = 1000L * 60 * 10 * 10 * 10 * 10 * 10;
+
+       /**
+        * Construct the security manager against an API key database
+        * 
+        * @param db
+        *            the API key db
+        */
+       public DMaaPAuthenticatorImpl(NsaApiDb<K> db) {
+               this(db, kDefaultRequestTimeWindow);
+       }
+
+       
+       
+       
+       /**
+        * Construct the security manager against an API key database with a
+        * specific request time window size
+        * 
+        * @param db
+        *            the API key db
+        * @param authTimeWindowMs
+        *            the size of the time window for request authentication
+        */
+       public DMaaPAuthenticatorImpl(NsaApiDb<K> db, long authTimeWindowMs) {
+               fAuthenticators = new LinkedList<>();
+
+               fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, authTimeWindowMs));
+       }
+
+       /**
+        * Authenticate a user's request. This method returns the API key if the
+        * user is authentic, null otherwise.
+        * 
+        * @param ctx
+        * @return an api key record, or null
+        */
+       public K authenticate(DMaaPContext ctx) {
+               final HttpServletRequest req = ctx.getRequest();
+               for (DMaaPAuthenticator<K> a : fAuthenticators) {
+                       if (a.qualify(req)) {
+                               final K k = a.isAuthentic(req);
+                               if (k != null)
+                                       return k;
+                       }
+                       // else: this request doesn't look right to the authenticator
+               }
+               return null;
+       }
+
+       /**
+        * Get the user associated with the incoming request, or null if the user is
+        * not authenticated.
+        * 
+        * @param ctx
+        * @return
+        */
+       public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) {
+               final DMaaPAuthenticator<NsaSimpleApiKey> m = ctx.getConfigReader().getfSecurityManager();
+               return m.authenticate(ctx);
+       }
+
+       /**
+        * method by default returning false
+        * @param req
+        * @return false
+        */
+       public boolean qualify(HttpServletRequest req) {
+               return false;
+       }
+/**
+ * method by default returning null
+ * @param req
+ * @return null
+ */
+       public K isAuthentic(HttpServletRequest req) {
+               return null;
+       }
+       
+       public void addAuthenticator ( DMaaPAuthenticator<K> a )
+       {
+               this.fAuthenticators.add(a);
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java
new file mode 100644 (file)
index 0000000..dbaad26
--- /dev/null
@@ -0,0 +1,86 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.authenticators.MechIdAuthenticator;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+
+import javax.servlet.http.HttpServletRequest;
+
+/**
+ * An authenticator for AT&T MechIds.
+ * 
+ * @author peter
+ *
+ * @param <K>
+ */
+public class DMaaPMechIdAuthenticator <K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+
+/**
+ * This is not yet implemented. by refault its returing false
+ * @param req HttpServletRequest
+ * @return false
+ */
+       public boolean qualify (HttpServletRequest req) {
+               // we haven't implemented anything here yet, so there's no qualifying request
+               return false;
+       }
+/**
+ * This metod authenticate the mech id 
+ * @param req
+ * @return APIkey or null
+ */
+       public K isAuthentic (HttpServletRequest req) {
+               final String remoteAddr = req.getRemoteAddr();
+               authLog ( "MechId auth is not yet implemented.", remoteAddr );
+               return null;
+       }
+
+       private static void authLog ( String msg, String remoteAddr )
+       {
+               log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg );
+       }
+
+
+       //private static final Logger log = Logger.getLogger( MechIdAuthenticator.class.toString());
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class);
+/**
+ * Curently its not yet implemented returning null
+ * @param ctx DMaaP context
+ * @return APIkey or null
+ */
+       @Override
+       public K authenticate(DMaaPContext ctx) {
+               // TODO Auto-generated method stub
+               return null;
+       }
+@Override
+public void addAuthenticator(DMaaPAuthenticator<K> a) {
+       // TODO Auto-generated method stub
+       
+}
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
new file mode 100644 (file)
index 0000000..aa116e8
--- /dev/null
@@ -0,0 +1,292 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.db.NsaApiDb;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+
+import javax.servlet.http.HttpServletRequest;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * This authenticator handles an AWS-like authentication, originally used by the
+ * Cambria server (the API server for UEB).
+ * 
+ * @author peter
+ *
+ * @param <K>
+ */
+public class DMaaPOriginalUebAuthenticator<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+       /**
+        * constructor initialization
+        * 
+        * @param db
+        * @param requestTimeWindowMs
+        */
+       public DMaaPOriginalUebAuthenticator(NsaApiDb<K> db, long requestTimeWindowMs) {
+               fDb = db;
+               fRequestTimeWindowMs = requestTimeWindowMs;
+               
+
+               
+
+       }
+
+       @Override
+       public boolean qualify(HttpServletRequest req) {
+               // accept anything that comes in with X-(Cambria)Auth in the header
+               final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
+               return xAuth != null;
+       }
+
+       /**
+        * method for authentication
+        * 
+        * @param req
+        * @return
+        */
+       public K isAuthentic(HttpServletRequest req) {
+               final String remoteAddr = req.getRemoteAddr();
+               // Cambria originally used "Cambria..." headers, but as the API key
+               // system is now more
+               // general, we take either form.
+               final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
+               final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" });
+
+               final String httpDate = req.getHeader("Date");
+
+               final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" });
+               return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce);
+       }
+
+       /**
+        * Authenticate a user's request. This method returns the API key if the
+        * user is authentic, null otherwise.
+        * 
+        * @param remoteAddr
+        * @param xAuth
+        * @param xDate
+        * @param httpDate
+        * @param nonce
+        * @return an api key record, or null
+        */
+       public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) {
+               if (xAuth == null) {
+                       authLog("No X-Auth header on request", remoteAddr);
+                       return null;
+               }
+               
+               final String[] xAuthParts = xAuth.split(":");
+               if (xAuthParts.length != 2) {
+                       authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
+                       return null;
+               }
+
+
+               // get the api key and signature
+               final String clientApiKey = xAuthParts[0];
+               final String clientApiHash = xAuthParts[1];
+               if (clientApiKey.length() == 0 || clientApiHash.length() == 0) {
+                       authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
+                       return null;
+               }
+               // if the user provided X-Date, use that. Otherwise, go for Date
+               final String dateString = xDate != null ? xDate : httpDate;
+               final Date clientDate = getClientDate(dateString);
+               if (clientDate == null) {
+                       authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr);
+                       return null;
+               }
+               // check the time range
+               final long nowMs = System.currentTimeMillis();
+               final long diffMs = Math.abs(nowMs - clientDate.getTime());
+               if (diffMs > fRequestTimeWindowMs) {
+                       authLog("Client date is not in acceptable range of server date. Client:" + clientDate.getTime()
+                                       + ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr);
+                       return null;
+               }
+               K apiRecord;
+               try {
+                       apiRecord = fDb.loadApiKey(clientApiKey);
+                       if (apiRecord == null) {
+                               authLog("No such API key " + clientApiKey, remoteAddr);
+                               return null;
+                       }
+               } catch (ConfigDbException e) {
+                       authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr);
+                       return null;
+               }
+                               // make the signed content
+               final StringBuilder sb = new StringBuilder();
+               sb.append(dateString);
+               if (nonce != null) {
+                       sb.append(":");
+                       sb.append(nonce);
+               }
+               final String signedContent = sb.toString();
+               // now check the signed date string
+               final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret());
+               if (serverCalculatedSignature == null || !serverCalculatedSignature.equals(clientApiHash)) {
+                       authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".",
+                                       remoteAddr);
+                       return null;
+               }
+               authLog("authenticated " + apiRecord.getKey(), remoteAddr);
+               return apiRecord;
+       }
+
+       /**
+        * Get the first value of the first existing header from the headers list
+        * 
+        * @param req
+        * @param headers
+        * @return a header value, or null if none exist
+        */
+       private static String getFirstHeader(HttpServletRequest req, String[] headers) {
+               for (String header : headers) {
+                       final String result = req.getHeader(header);
+                       if (result != null)
+                               return result;
+               }
+               return null;
+       }
+
+       /**
+        * Parse the date string into a Date using one of the supported date
+        * formats.
+        * 
+        * @param dateHeader
+        * @return a date, or null
+        */
+       private static Date getClientDate(String dateString) {
+               if (dateString == null) {
+                       return null;
+               }
+
+               // parse the date
+               Date result = null;
+               for (String dateFormat : kDateFormats) {
+                       final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US);
+                       if (!dateFormat.contains("z") && !dateFormat.contains("Z")) {
+                               parser.setTimeZone(TIMEZONE_GMT);
+                       }
+
+                       try {
+                               result = parser.parse(dateString);
+                               break;
+                       } catch (ParseException e) {
+                               // presumably wrong format
+                       }
+               }
+               return result;
+       }
+
+       private static void authLog(String msg, String remoteAddr) {
+               log.info("AUTH-LOG(" + remoteAddr + "): " + msg);
+       }
+
+       private final NsaApiDb<K> fDb;
+       private final long fRequestTimeWindowMs;
+
+       private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT");
+       
+       private static final String kDateFormats[] =
+               {
+                   // W3C date format (RFC 3339).
+                   "yyyy-MM-dd'T'HH:mm:ssz",
+                   "yyyy-MM-dd'T'HH:mm:ssXXX",         // as of Java 7, reqd to handle colon in TZ offset
+
+                   // Preferred HTTP date format (RFC 1123).
+                   "EEE, dd MMM yyyy HH:mm:ss zzz",
+
+                   // simple unix command line 'date' format
+                   "EEE MMM dd HH:mm:ss z yyyy",
+
+                   // Common date format (RFC 822).
+                   "EEE, dd MMM yy HH:mm:ss z",
+                   "EEE, dd MMM yy HH:mm z",
+                   "dd MMM yy HH:mm:ss z",
+                   "dd MMM yy HH:mm z",
+
+                       // Obsoleted HTTP date format (ANSI C asctime() format).
+                   "EEE MMM dd HH:mm:ss yyyy",
+
+                   // Obsoleted HTTP date format (RFC 1036).
+                   "EEEE, dd-MMM-yy HH:mm:ss zzz",
+               };
+
+       
+                       
+                       
+
+               
+                       
+
+                       
+                       
+
+                       
+                       
+
+                       
+                       
+
+                       
+                       
+       // logger declaration
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class);
+       @Override
+
+               // TODO Auto-generated method stub
+               
+       //}
+       
+       public K authenticate(DMaaPContext ctx) {
+               
+               
+               
+                       
+                               
+                               
+                                       
+                       
+                       
+               
+               return null;
+       }
+
+
+       public void addAuthenticator ( DMaaPAuthenticator<K> a )
+       {
+               
+       }
+       
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java
new file mode 100644 (file)
index 0000000..855d4a1
--- /dev/null
@@ -0,0 +1,82 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.json.JSONException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import java.io.IOException;
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+public interface AdminService {
+       /**
+        * method provide consumerCache
+        * 
+        * @param dMaaPContext
+        * @throws IOException
+        */
+       void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException;
+
+       /**
+        * method drops consumer cache
+        * 
+        * @param dMaaPContext
+        * @throws JSONException
+        * @throws IOException
+        */
+       void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException;
+       
+       
+       /**
+        * Get list of blacklisted ips 
+        * @param dMaaPContext context
+        * @throws IOException ex
+        * @throws AccessDeniedException ex
+        */
+       void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException;
+       
+       /**
+        * Add ip to blacklist
+        * @param dMaaPContext context
+        * @param ip ip
+        * @throws IOException ex
+        * @throws ConfigDbException ex
+        * @throws AccessDeniedException ex
+        */
+       void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
+       
+       /**
+        * Remove ip from blacklist
+        * @param dMaaPContext context
+        * @param ip ip
+        * @throws IOException ex
+        * @throws ConfigDbException ex
+        * @throws AccessDeniedException ex
+        */
+       void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
+       
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java
new file mode 100644 (file)
index 0000000..4b79f5e
--- /dev/null
@@ -0,0 +1,105 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import java.io.IOException;
+
+/**
+ * Declaring all the method in interface that is mainly used for authentication
+ * purpose.
+ *
+ *
+ */
+
+public interface ApiKeysService {
+       /**
+        * This method declaration for getting all ApiKey that has generated on
+        * server.
+        * 
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+
+       public void getAllApiKeys(DMaaPContext dmaapContext)
+                       throws ConfigDbException, IOException;
+
+       /**
+        * Getting information about specific ApiKey
+        * 
+        * @param dmaapContext
+        * @param apikey
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+
+       public void getApiKey(DMaaPContext dmaapContext, String apikey)
+                       throws ConfigDbException, IOException;
+
+       /**
+        * Thid method is used for create a particular ApiKey
+        * 
+        * @param dmaapContext
+        * @param nsaApiKey
+        * @throws KeyExistsException
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+
+       public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
+                       throws KeyExistsException, ConfigDbException, IOException;
+
+       /**
+        * This method is used for update ApiKey that is already generated on
+        * server.
+        * 
+        * @param dmaapContext
+        * @param apikey
+        * @param nsaApiKey
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws AccessDeniedException
+        * @throws AccessDeniedException
+        */
+       public void updateApiKey(DMaaPContext dmaapContext, String apikey,
+                       ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException
+                       ;
+
+       /**
+        * This method is used for delete specific ApiKey
+        * 
+        * @param dmaapContext
+        * @param apikey
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws AccessDeniedException
+        */
+
+       public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
+                       throws ConfigDbException, IOException,AccessDeniedException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java
new file mode 100644 (file)
index 0000000..9f4e406
--- /dev/null
@@ -0,0 +1,75 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public interface EventsService {
+       /**
+        * 
+        * @param ctx
+        * @param topic
+        * @param consumerGroup
+        * @param clientId
+        * @throws ConfigDbException
+        * @throws TopicExistsException
+        * @throws AccessDeniedException
+        * @throws UnavailableException
+        * @throws CambriaApiException
+        * @throws IOException
+        */
+       public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+                       throws ConfigDbException, TopicExistsException,UnavailableException,
+                       CambriaApiException, IOException,AccessDeniedException;
+
+       /**
+        * 
+        * @param ctx
+        * @param topic
+        * @param msg
+        * @param defaultPartition
+        * @param requestTime
+        * @throws ConfigDbException
+        * @throws AccessDeniedException
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        * @throws IOException
+        */
+       public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+                       final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+                                       CambriaApiException, IOException,missingReqdSetting;
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java
new file mode 100644 (file)
index 0000000..5aa1ec7
--- /dev/null
@@ -0,0 +1,65 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Contains the logic for executing calls to the Mirror Maker agent tool.
+ * 
+ *
+ * @since May 25, 2016
+ */
+
+public interface MMService {
+
+       /*
+        * this method calls the add white list method of a Mirror Maker agent API
+        */
+       public void addWhiteList();
+       
+       /*
+        * this method calls the remove white list method of a Mirror Maker agent API
+        */
+       public void removeWhiteList();
+       
+       /*
+        * This method calls the list white list method of a Mirror Maker agent API
+        */
+       public void listWhiteList();
+       
+       public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException, 
+               AccessDeniedException, UnavailableException, CambriaApiException, IOException;
+       
+       public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+                       final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+                       CambriaApiException, IOException, missingReqdSetting;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java
new file mode 100644 (file)
index 0000000..782a32a
--- /dev/null
@@ -0,0 +1,55 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+/**
+ * @author amol.ramesh.dalne
+ *
+ */
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import java.io.IOException;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public interface MetricsService {
+       /**
+        * 
+        * @param ctx
+        * @throws IOException
+        */
+       public void get(DMaaPContext ctx) throws IOException;
+
+       /**
+        * 
+        * @param ctx
+        * @param name
+        * @throws IOException
+        * @throws CambriaApiException
+        */
+       public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java
new file mode 100644 (file)
index 0000000..edcf1a7
--- /dev/null
@@ -0,0 +1,175 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.json.JSONException;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+
+import java.io.IOException;
+
+/**
+ * interface provide all the topic related operations
+ * 
+ * @author anowarul.islam
+ *
+ */
+public interface TopicService {
+       /**
+        * method fetch details of all the topics
+        * 
+        * @param dmaapContext
+        * @throws JSONException
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
+       void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
+
+       /**
+        * method fetch details of specific topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       void getTopic(DMaaPContext dmaapContext, String topicName)
+                       throws ConfigDbException, IOException, TopicExistsException;
+
+       /**
+        * method used to create the topic
+        * 
+        * @param dmaapContext
+        * @param topicBean
+        * @throws CambriaApiException
+        * @throws TopicExistsException
+        * @throws IOException
+        * @throws AccessDeniedException
+        * @throws JSONException 
+        */
+
+       void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
+                       throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException;
+
+       /**
+        * method used to delete to topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @throws IOException
+        * @throws AccessDeniedException
+        * @throws ConfigDbException
+        * @throws CambriaApiException
+        * @throws TopicExistsException
+        */
+
+       void deleteTopic(DMaaPContext dmaapContext, String topicName)
+                       throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException;
+
+       /**
+        * method provides list of all the publishers associated with a topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @throws IOException
+        * @throws ConfigDbException
+        * @throws TopicExistsException
+        */
+       void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
+                       throws IOException, ConfigDbException, TopicExistsException;
+
+       /**
+        * method provides details of all the consumer associated with a specific
+        * topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @throws IOException
+        * @throws ConfigDbException
+        * @throws TopicExistsException
+        */
+       void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
+                       throws IOException, ConfigDbException, TopicExistsException;
+
+       /**
+        * method provides publishing right to a specific topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @param producerId
+        * @throws AccessDeniedException
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+       /**
+        * method denies any specific publisher from a topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @param producerId
+        * @throws AccessDeniedException
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+       /**
+        * method provide consuming right to a specific user on a topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @param consumerId
+        * @throws AccessDeniedException
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+       /**
+        * method denies a particular user's consuming right on a topic
+        * 
+        * @param dmaapContext
+        * @param topicName
+        * @param consumerId
+        * @throws AccessDeniedException
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java
new file mode 100644 (file)
index 0000000..c318e5a
--- /dev/null
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.configs.ConfigDbException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
+
+import java.io.IOException;
+
+/**
+ * 
+ * @author anowarul.islam
+ *
+ */
+public interface TransactionService {
+       /**
+        * 
+        * @param trnObj
+        */
+       void checkTransaction(TransactionObj trnObj);
+
+       /**
+        * 
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException;
+
+       /**
+        * 
+        * @param dmaapContext
+        * @param transactionId
+        * @throws ConfigDbException
+        * @throws JSONException
+        * @throws IOException
+        */
+       void getTransactionObj(DMaaPContext dmaapContext, String transactionId)
+                       throws ConfigDbException, JSONException, IOException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java
new file mode 100644 (file)
index 0000000..2edb42d
--- /dev/null
@@ -0,0 +1,92 @@
+/**
+ * 
+ */
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service;
+
+import com.att.nsa.configs.ConfigDbException;
+import org.apache.kafka.common.errors.TopicExistsException;
+import org.json.JSONException;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import java.io.IOException;
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+public interface UIService {
+       /**
+        * Returning template of hello page.
+        * 
+        * @param dmaapContext
+        * @throws IOException
+        */
+       void hello(DMaaPContext dmaapContext) throws IOException;
+
+       /**
+        * Fetching list of all api keys and returning in a templated form for
+        * display
+        * 
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException,
+                       IOException;
+
+       /**
+        * Fetching detials of apikey in a templated form for display
+        * 
+        * @param dmaapContext
+        * @param apiKey
+        * @throws Exception
+        */
+       void getApiKey(DMaaPContext dmaapContext, final String apiKey)
+                       throws CambriaApiException, ConfigDbException, JSONException, IOException;
+
+       /**
+        * Fetching list of all the topics and returning in a templated form for
+        * display
+        * 
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException,
+                       IOException;
+
+       /**
+        * Fetching detials of topic in a templated form for display
+        * 
+        * @param dmaapContext
+        * @param topic
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       void getTopic(DMaaPContext dmaapContext, final String topic)
+                       throws ConfigDbException, IOException, TopicExistsException;
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java
new file mode 100644 (file)
index 0000000..deed9c2
--- /dev/null
@@ -0,0 +1,189 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.AdminService;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Set;
+
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+@Component
+public class AdminServiceImpl implements AdminService {
+
+       //private Logger log = Logger.getLogger(AdminServiceImpl.class.toString());
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class);
+       /**
+        * getConsumerCache returns consumer cache
+        * @param dMaaPContext context
+        * @throws IOException ex
+        * @throws AccessDeniedException 
+        */
+       @Override       
+       public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
+               adminAuthenticate(dMaaPContext);
+               
+               JSONObject consumers = new JSONObject();
+               JSONArray jsonConsumersList = new JSONArray();
+
+               for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) {
+                       JSONObject consumerObject = new JSONObject();
+                       consumerObject.put("name", consumer.getName());
+                       consumerObject.put("created", consumer.getCreateTimeMs());
+                       consumerObject.put("accessed", consumer.getLastAccessMs());
+                       jsonConsumersList.put(consumerObject);
+               }
+
+               consumers.put("consumers", jsonConsumersList);
+               log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "===========");
+               DMaaPResponseBuilder.respondOk(dMaaPContext, consumers);
+       }
+
+       /**
+        * 
+        * dropConsumerCache() method clears consumer cache
+        * @param dMaaPContext context
+        * @throws JSONException ex
+        * @throws IOException ex
+        * @throws AccessDeniedException 
+        * 
+        */
+       @Override
+       public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException {
+               adminAuthenticate(dMaaPContext);
+               getConsumerFactory(dMaaPContext).dropCache();
+               DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully");
+               // log.info("========== AdminServiceImpl: dropConsumerCache: Consumer
+               // Cache successfully dropped.===========");
+       }
+
+       /** 
+        * getfConsumerFactory returns CosnumerFactory details
+        * @param dMaaPContext contxt
+        * @return ConsumerFactory obj
+        * 
+        */
+       private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) {
+               return dMaaPContext.getConfigReader().getfConsumerFactory();
+       }
+       
+       /**
+        * return ipblacklist
+        * @param dMaaPContext context
+        * @return blacklist obj
+        */
+       private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) {
+               return dMaaPContext.getConfigReader().getfIpBlackList();
+       }
+       
+       
+       /**
+        * Get list of blacklisted ips
+        */
+       @Override
+       public void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException
+       {
+               adminAuthenticate ( dMaaPContext );
+
+               DMaaPResponseBuilder.respondOk ( dMaaPContext,
+                       new JSONObject().put ( "blacklist",
+                                       setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
+       }
+       
+       public static JSONArray setToJsonArray ( Set<?> fields )
+       {
+               return collectionToJsonArray ( fields );
+       }
+       
+       public static JSONArray collectionToJsonArray ( Collection<?> fields )
+       {
+               final JSONArray a = new JSONArray ();
+               if ( fields != null )
+               {
+                       for ( Object o : fields )
+                       {
+                               a.put ( o );
+                       }
+               }
+               return a;
+       }
+       
+       /**
+        * Add ip to blacklist
+        */
+       @Override
+       public void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
+       {
+               adminAuthenticate ( dMaaPContext );
+
+               getIpBlacklist (dMaaPContext).add ( ip );
+               DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
+       }
+       
+       /**
+        * Remove ip from blacklist
+        */
+       @Override
+       public void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
+       {
+               adminAuthenticate ( dMaaPContext );
+
+               getIpBlacklist (dMaaPContext).remove ( ip );
+               DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
+       }
+       
+       /**
+        * Authenticate if user is admin
+        * @param dMaaPContext context
+        * @throws AccessDeniedException ex
+        */
+       private static void adminAuthenticate ( DMaaPContext dMaaPContext ) throws AccessDeniedException
+       {
+               
+               final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
+               if ( user == null || !user.getKey ().equals ( "admin" ) )
+               {
+                       throw new AccessDeniedException ();
+               }
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java
new file mode 100644 (file)
index 0000000..fe206c0
--- /dev/null
@@ -0,0 +1,320 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.ApiKeysService;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.dmf.mr.utils.Emailer;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+
+/**
+ * Implementation of the ApiKeysService, this will provide the below operations,
+ * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
+ * 
+ * @author nilanjana.maity
+ */
+@Service
+public class ApiKeysServiceImpl implements ApiKeysService {
+
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString());
+       /**
+        * This method will provide all the ApiKeys present in kafka server.
+        * 
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       public void getAllApiKeys(DMaaPContext dmaapContext)
+                       throws ConfigDbException, IOException {
+
+               ConfigurationReader configReader = dmaapContext.getConfigReader();
+
+               log.info("configReader : " + configReader.toString());
+
+               final JSONObject result = new JSONObject();
+               final JSONArray keys = new JSONArray();
+               result.put("apiKeys", keys);
+
+               NsaApiDb<NsaSimpleApiKey> apiDb = configReader.getfApiKeyDb();
+
+               for (String key : apiDb.loadAllKeys()) {
+                       keys.put(key);
+               }
+               log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : "
+                               + keys.toString() + "===========");
+               DMaaPResponseBuilder.respondOk(dmaapContext, result);
+       }
+
+       /**
+        * @param dmaapContext
+        * @param apikey
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       @Override
+       public void getApiKey(DMaaPContext dmaapContext, String apikey)
+                       throws ConfigDbException, IOException {
+
+               String errorMsg = "Api key name is not mentioned.";
+               int errorCode = HttpStatusCodes.k400_badRequest;
+               
+               if (null != apikey) {
+                       NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext)
+                                       .loadApiKey(apikey);
+                       
+               
+                       if (null != simpleApiKey) {
+                               JSONObject result = simpleApiKey.asJsonObject();
+                               DMaaPResponseBuilder.respondOk(dmaapContext, result);
+                               log.info("========== ApiKeysServiceImpl: getApiKey : "
+                                               + result.toString() + "===========");
+                               return;
+                       } else {
+                               errorMsg = "Api key [" + apikey + "] does not exist.";
+                               errorCode = HttpStatusCodes.k404_notFound;
+                               log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. "
+                                               + "===========");
+                               DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+                                               errorMsg);
+                               throw new IOException();
+                       }
+               }
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param nsaApiKey
+        * @throws KeyExistsException
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       @Override
+       public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
+                       throws KeyExistsException, ConfigDbException, IOException {
+
+               log.debug("TopicService: : createApiKey....");
+               
+                       String contactEmail = nsaApiKey.getEmail();
+                       final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
+                        String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
+                        if(null==kSetting_AllowAnonymousKeys) {
+                                kSetting_AllowAnonymousKeys ="false";
+                        }
+           
+                        if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true")    &&  !emailProvided   )
+             {
+               DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address.");
+               return;
+             }
+               
+               
+               final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+               String apiKey = nsaApiKey.getKey();
+               String sharedSecret = nsaApiKey.getSharedSecret();
+               final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
+                               sharedSecret);
+               if (null != key) {
+
+                       if (null != nsaApiKey.getEmail()) {
+                               key.setContactEmail(nsaApiKey.getEmail());
+                       }
+
+                       if (null != nsaApiKey.getDescription()) {
+                               key.setDescription(nsaApiKey.getDescription());
+                       }
+
+                       log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
+                                       + key.toString() + "=====");
+                       apiKeyDb.saveApiKey(key);
+                       
+                       // email out the secret to validate the email address
+                       if ( emailProvided )
+                       {
+                               String body = "\n" + "Your email address was provided as the creator of new API key \""
+                               + apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know."
+                                + "but don't worry -"
+                               + " the API key is useless without the information below, which has been provided "
+                               + "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t"
+                               + sharedSecret + "\n\n" + "Note that it's normal to share the API key"
+                               + " (" + apiKey + "). "                         
+                               + "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. "
+                               + "However, you should NOT share the API key's secret. " + "The API key is associated with your"
+                               + " email alone. ALL access to data made with this " + "key will be your responsibility. If you "
+                               + "share the secret, someone else can use the API key " + "to access proprietary data with your "
+                               + "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team";
+       
+                       Emailer em = dmaapContext.getConfigReader().getSystemEmailer();
+                       em.send(contactEmail, "New API Key", body);
+                       }
+                       log.debug("TopicService: : sending response.");
+       
+                       JSONObject o = key.asJsonObject();
+                       
+                       o.put ( NsaSimpleApiKey.kApiSecretField,
+                                       emailProvided ?
+                                               "Emailed to " + contactEmail + "." :
+                                               key.getSecret ()
+                               );
+                       DMaaPResponseBuilder.respondOk(dmaapContext,
+                                       o);
+                       
+                       return;
+               } else {
+                       log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.=====");
+                       DMaaPResponseBuilder.respondWithError(dmaapContext,
+                                       HttpStatusCodes.k500_internalServerError,
+                                       "Failed to create api key.");
+                       throw new KeyExistsException(apiKey);
+               }
+       }
+
+       /**
+        * @param dmaapContext
+        * @param apikey
+        * @param nsaApiKey
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws AccessDeniedException
+        */
+       @Override
+       public void updateApiKey(DMaaPContext dmaapContext, String apikey,
+                       ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException {
+
+               String errorMsg = "Api key name is not mentioned.";
+               int errorCode = HttpStatusCodes.k400_badRequest;
+
+               if (null != apikey) {
+                       final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+                       final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
+                       boolean shouldUpdate = false;
+
+                       if (null != key) {
+                               final NsaApiKey user = DMaaPAuthenticatorImpl
+                                               .getAuthenticatedUser(dmaapContext);
+
+                               if (user == null || !user.getKey().equals(key.getKey())) {
+                                       throw new AccessDeniedException("You must authenticate with the key you'd like to update.");
+                               }
+
+                               if (null != nsaApiKey.getEmail()) {
+                                       key.setContactEmail(nsaApiKey.getEmail());
+                                       shouldUpdate = true;
+                               }
+
+                               if (null != nsaApiKey.getDescription()) {
+                                       key.setDescription(nsaApiKey.getDescription());
+                                       shouldUpdate = true;
+                               }
+
+                               if (shouldUpdate) {
+                                       apiKeyDb.saveApiKey(key);
+                               }
+
+                               log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :"
+                                               + key.toString() + "=========");
+                               DMaaPResponseBuilder.respondOk(dmaapContext,
+                                               key.asJsonObject());
+                               return;
+                       }
+               } else {
+                       errorMsg = "Api key [" + apikey + "] does not exist.";
+                       errorCode = HttpStatusCodes.k404_notFound;
+                       DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+                                       errorMsg);
+                       log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
+                       throw new IOException();
+               }
+       }
+
+       /**
+        * @param dmaapContext
+        * @param apikey
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws AccessDeniedException
+        */
+       @Override
+       public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
+                       throws ConfigDbException, IOException, AccessDeniedException {
+
+               String errorMsg = "Api key name is not mentioned.";
+               int errorCode = HttpStatusCodes.k400_badRequest;
+
+               if (null != apikey) {
+                       final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+                       final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
+
+                       if (null != key) {
+
+                               final NsaApiKey user = DMaaPAuthenticatorImpl
+                                               .getAuthenticatedUser(dmaapContext);
+                               if (user == null || !user.getKey().equals(key.getKey())) {
+                                       throw new AccessDeniedException("You don't own the API key.");
+                               }
+
+                               apiKeyDb.deleteApiKey(key);
+                               log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============");
+                               DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+                                               "Api key [" + apikey + "] deleted successfully.");
+                               return;
+                       }
+               } else {
+                       errorMsg = "Api key [" + apikey + "] does not exist.";
+                       errorCode = HttpStatusCodes.k404_notFound;
+                       DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+                                       errorMsg);
+                       log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
+                       throw new IOException();
+               }
+       }
+
+       /**
+        * 
+        * @param dmaapContext
+        * @return
+        */
+       private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
+               ConfigurationReader configReader = dmaapContext.getConfigReader();
+               return configReader.getfApiKeyDb();
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java
new file mode 100644 (file)
index 0000000..1ad7e3a
--- /dev/null
@@ -0,0 +1,153 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionFactory;
+import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObj;
+import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObjDB;
+import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
+
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Persistent storage for Transaction objects built over an abstract config db.
+ * 
+ * @author anowarul.islam
+ *
+ * @param <K>
+ */
+public class BaseTransactionDbImpl<K extends DMaaPTransactionObj> implements DMaaPTransactionObjDB<K> {
+
+       private final ConfigDb fDb;
+       private final ConfigPath fBasePath;
+       private final DMaaPTransactionFactory<K> fKeyFactory;
+
+       private static final String kStdRootPath = "/transaction";
+
+       private ConfigPath makePath(String transactionId) {
+               return fBasePath.getChild(transactionId);
+       }
+
+       /**
+        * Construct an Transaction db over the given config db at the standard
+        * location
+        * 
+        * @param db
+        * @param keyFactory
+        * @throws ConfigDbException
+        */
+       public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory<K> keyFactory) throws ConfigDbException {
+               this(db, kStdRootPath, keyFactory);
+       }
+
+       /**
+        * Construct an Transaction db over the given config db using the given root
+        * location
+        * 
+        * @param db
+        * @param rootPath
+        * @param keyFactory
+        * @throws ConfigDbException
+        */
+       public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory<K> keyFactory)
+                       throws ConfigDbException {
+               fDb = db;
+               fBasePath = db.parse(rootPath);
+               fKeyFactory = keyFactory;
+
+               if (!db.exists(fBasePath)) {
+                       db.store(fBasePath, "");
+               }
+       }
+
+       /**
+        * Create a new Transaction Obj. If one exists,
+        * 
+        * @param id
+        * @return the new Transaction record
+        * @throws ConfigDbException
+        */
+       public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException {
+               final ConfigPath path = makePath(id);
+               if (fDb.exists(path)) {
+                       throw new KeyExistsException(id);
+               }
+
+               // make one, store it, return it
+               final K newKey = fKeyFactory.makeNewTransactionId(id);
+               fDb.store(path, newKey.serialize());
+               return newKey;
+       }
+
+       /**
+        * Save an Transaction record. This must be used after changing auxiliary
+        * data on the record. Note that the transaction object must exist (via
+        * createTransactionObj).
+        * 
+        * @param transaction
+        *            object
+        * @throws ConfigDbException
+        */
+       @Override
+       public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException {
+               final ConfigPath path = makePath(trnObj.getId());
+               if (!fDb.exists(path) || !(trnObj instanceof TransactionObj)) {
+                       throw new IllegalStateException(trnObj.getId() + " is not known to this database");
+               }
+               fDb.store(path, ((TransactionObj) trnObj).serialize());
+       }
+
+       /**
+        * Load an Transaction record based on the Transaction Id value
+        * 
+        * @param transactionId
+        * @return an Transaction Object record or null
+        * @throws ConfigDbException
+        */
+       @Override
+       public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException {
+               final String data = fDb.load(makePath(transactionId));
+               if (data != null) {
+                       return fKeyFactory.makeNewTransactionObj(data);
+               }
+               return null;
+       }
+
+       /**
+        * Load all transactions known to this database. (This could be expensive.)
+        * 
+        * @return a set of all Transaction objects
+        * @throws ConfigDbException
+        */
+       public synchronized Set<String> loadAllTransactionObjs() throws ConfigDbException {
+               final TreeSet<String> result = new TreeSet<>();
+               for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) {
+                       result.add(cp.getName());
+               }
+               return result;
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ErrorResponseProvider.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ErrorResponseProvider.java
new file mode 100644 (file)
index 0000000..3456eb9
--- /dev/null
@@ -0,0 +1,147 @@
+/*******************************************************************************
+ *  ============LICENSE_START===================================================
+ *  org.onap.dmaap
+ *  ============================================================================
+ *  Copyright © 2019 Nokia Intellectual Property. All rights reserved.
+ *  ============================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=====================================================
+ ******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.google.common.base.Preconditions;
+import org.apache.http.HttpStatus;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+
+import java.util.Date;
+
+class ErrorResponseProvider {
+
+    private String clientId;
+    private String topicName;
+    private String consumerGroup;
+    private String remoteHost;
+    private String publisherId;
+    private String publisherIp;
+    private DMaaPErrorMessages errorMessages;
+
+    private ErrorResponseProvider() {
+
+    }
+
+    ErrorResponse getIpBlacklistedError(String remoteAddr) {
+        return new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+            DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+            "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+            null, Utils.getFormattedDate(new Date()), topicName, publisherId,
+            publisherIp, consumerGroup + "/" + clientId, remoteHost);
+    }
+
+    ErrorResponse getTopicNotFoundError() {
+        return new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+            DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+            errorMessages.getTopicNotExist() + "-[" + topicName + "]", null, Utils.getFormattedDate(new Date()),
+            topicName, publisherId, publisherIp, consumerGroup + "/" + clientId, remoteHost);
+    }
+
+    ErrorResponse getAafAuthorizationError(String permission, String action) {
+        return new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+            DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+            errorMessages.getNotPermitted1() + action + errorMessages.getNotPermitted2() + topicName + " on "
+                + permission,
+            null, Utils.getFormattedDate(new Date()), topicName, publisherId, publisherIp, consumerGroup + "/" + clientId,
+            remoteHost);
+    }
+
+    ErrorResponse getServiceUnavailableError(String msg) {
+        return new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+            DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+            errorMessages.getServerUnav() + msg, null, Utils.getFormattedDate(new Date()), topicName,
+            publisherId, publisherIp, consumerGroup + "/" + clientId, remoteHost);
+    }
+
+    ErrorResponse getConcurrentModificationError() {
+        return new ErrorResponse(HttpStatus.SC_CONFLICT,
+            DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
+            "Couldn't respond to client, possible of consumer requests from more than one server. Please contact MR team if you see this issue occurs continously", null,
+            Utils.getFormattedDate(new Date()), topicName, publisherId, publisherIp, consumerGroup + "/" + clientId, remoteHost);
+    }
+
+    ErrorResponse getGenericError(String msg) {
+        return new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+            DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+            "Couldn't respond to client, closing cambria consumer" + msg, null,
+            Utils.getFormattedDate(new Date()), topicName, publisherId, publisherIp, consumerGroup + "/" + clientId, remoteHost);
+    }
+
+    public static class Builder {
+
+        private String clientId;
+        private String topicName;
+        private String consumerGroup;
+        private String remoteHost;
+        private String publisherId;
+        private String publisherIp;
+        DMaaPErrorMessages errorMessages;
+
+        Builder withErrorMessages(DMaaPErrorMessages errorMessages) {
+            this.errorMessages = errorMessages;
+            return this;
+        }
+
+        Builder withTopic(String topic) {
+            this.topicName = topic;
+            return this;
+        }
+
+        Builder withClient(String client) {
+            this.clientId = client;
+            return this;
+        }
+
+        Builder withConsumerGroup(String consumerGroup) {
+            this.consumerGroup = consumerGroup;
+            return this;
+        }
+
+        Builder withRemoteHost(String remoteHost) {
+            this.remoteHost = remoteHost;
+            return this;
+        }
+
+        Builder withPublisherId(String publisherId) {
+            this.publisherId = publisherId;
+            return this;
+        }
+
+        Builder withPublisherIp(String publisherIp) {
+            this.publisherIp = publisherIp;
+            return this;
+        }
+
+        public ErrorResponseProvider build() {
+            Preconditions.checkArgument(errorMessages!=null);
+            ErrorResponseProvider errRespProvider = new ErrorResponseProvider();
+            errRespProvider.errorMessages = this.errorMessages;
+            errRespProvider.clientId = this.clientId;
+            errRespProvider.consumerGroup = this.consumerGroup;
+            errRespProvider.topicName = this.topicName;
+            errRespProvider.remoteHost = this.remoteHost;
+            errRespProvider.publisherId = this.publisherId;
+            errRespProvider.publisherIp = this.publisherIp;
+            return errRespProvider;
+        }
+    }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java
new file mode 100644 (file)
index 0000000..9f35812
--- /dev/null
@@ -0,0 +1,768 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.math.NumberUtils;
+import org.apache.http.HttpStatus;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.errors.TopicExistsException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet;
+import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream;
+import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.EventsService;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.ConcurrentModificationException;
+import java.util.Date;
+import java.util.LinkedList;
+
+/**
+ * This class provides the functinality to publish and subscribe message to
+ * kafka
+ * 
+ * @author Ramkumar Sembaiyam
+ *
+ */
+@Service
+public class EventsServiceImpl implements EventsService {
+       
+       private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+       private static final String BATCH_LENGTH = "event.batch.length";
+       private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+       private static final String TIMEOUT_PROPERTY = "timeout";
+       private static final String SUBSCRIBE_ACTION = "sub";
+       private static final String PUBLISH_ACTION = "pub";
+
+       @Autowired
+       private DMaaPErrorMessages errorMessages;
+
+       String getPropertyFromAJSCmap(String propertyKey) {
+               return AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, propertyKey);
+       }
+
+       public DMaaPErrorMessages getErrorMessages() {
+               return errorMessages;
+       }
+
+       public void setErrorMessages(DMaaPErrorMessages errorMessages) {
+               this.errorMessages = errorMessages;
+       }
+
+       /**
+        * @param ctx
+        * @param topic
+        * @param consumerGroup
+        * @param clientId
+        * @throws ConfigDbException,
+        *             TopicExistsException, AccessDeniedException,
+        *             UnavailableException, CambriaApiException, IOException
+        * 
+        * 
+        */
+       @Override
+       public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+                       throws ConfigDbException, AccessDeniedException, UnavailableException,
+                       CambriaApiException, IOException {
+
+               final long startTime = System.currentTimeMillis();
+               final HttpServletRequest req = ctx.getRequest();
+               final LogWrap logger = new LogWrap(topic, consumerGroup, clientId);
+               final String remoteHost = req.getRemoteHost();
+               ErrorResponseProvider errRespProvider = new ErrorResponseProvider.Builder().withErrorMessages(errorMessages)
+                       .withTopic(topic).withConsumerGroup(consumerGroup).withClient(clientId).withRemoteHost(remoteHost).build();
+
+               validateIpBlacklist(errRespProvider, ctx);
+
+               final Topic metaTopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+               if (metaTopic == null) {
+                       throw new CambriaApiException(errRespProvider.getTopicNotFoundError());
+               }
+
+               boolean isAAFTopic = authorizeClientWhenNeeded(ctx, metaTopic, topic, errRespProvider, SUBSCRIBE_ACTION);
+
+               final long elapsedMs1 = System.currentTimeMillis() - startTime;
+               logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup
+                               + " " + clientId);
+
+               verifyHostId();
+               final boolean pretty = isPrettyPrintEnabled();
+               final boolean withMeta = isMetaOffsetEnabled();
+               int timeoutMs = getMessageTimeout(req);
+               int limit = getMessageLimit(req);
+               String topicFilter = (null != req.getParameter("filter")) ? req.getParameter("filter") : CambriaConstants.kNoFilter;
+               logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost());
+
+               Consumer consumer = null;
+               try {
+                       final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+                       final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
+                       rl.onCall(topic, consumerGroup, clientId, remoteHost);
+                       consumer = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,
+                                       remoteHost);
+                       CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(consumer).timeout(timeoutMs)
+                                       .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
+                       coes.setDmaapContext(ctx);
+                       coes.setTopic(metaTopic);
+                       coes.setTransEnabled(isTransEnabled() || isAAFTopic);
+                       coes.setTopicStyle(isAAFTopic);
+                       final long elapsedMs2 = System.currentTimeMillis() - startTime;
+                       logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " "
+                                       + consumerGroup + " " + clientId);
+
+                       respondOkWithStream(ctx, coes);
+                       // No IOException thrown during respondOkWithStream, so commit the
+                       // new offsets to all the brokers
+                       consumer.commitOffsets();
+                       final int sent = coes.getSentCount();
+                       metricsSet.consumeTick(sent);
+                       rl.onSend(topic, consumerGroup, clientId, sent);
+                       final long elapsedMs = System.currentTimeMillis() - startTime;
+                       logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + consumer.getOffset() + " for "
+                                       + topic + " " + consumerGroup + " " + clientId + " on to the server "
+                                       + remoteHost);
+
+               } catch (UnavailableException excp) {
+                       logger.warn(excp.getMessage(), excp);
+                       ErrorResponse errRes = errRespProvider.getServiceUnavailableError(excp.getMessage());
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               } catch (ConcurrentModificationException excp1) {
+                       LOG.info(excp1.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId + " from Remote"+remoteHost);
+                       ErrorResponse errRes = errRespProvider.getConcurrentModificationError();
+                       logger.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+                       
+               } catch (Exception excp) {
+                       logger.info("Couldn't respond to client, closing cambria consumer " + " " + topic + " " + consumerGroup
+                                       + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp);
+                       ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+                       ErrorResponse errRes = errRespProvider.getGenericError(excp.getMessage());
+                       logger.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               } finally {
+                       if (consumer != null && !isCacheEnabled()) {
+                               try {
+                                       consumer.close();
+                               } catch (Exception e) {
+                                       logger.info("***Exception occurred in getEvents finally block while closing the consumer " + " "
+                                                       + topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE
+                                                       + " " + e);
+                               }
+                       }
+               }
+       }
+
+       private void validateIpBlacklist(ErrorResponseProvider errResponseProvider, DMaaPContext ctx) throws CambriaApiException {
+               final String remoteAddr = Utils.getRemoteAddress(ctx);
+               if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+                       ErrorResponse errRes = errResponseProvider.getIpBlacklistedError(remoteAddr);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+       }
+
+       private boolean authorizeClientWhenNeeded(DMaaPContext ctx, Topic metaTopic, String topicName,
+               ErrorResponseProvider errRespProvider, String action) throws CambriaApiException, AccessDeniedException {
+
+               boolean isAAFTopic = false;
+               String metricTopicName = getMetricTopicName();
+               if(!metricTopicName.equalsIgnoreCase(topicName)) {
+                       if(isCadiEnabled() && isTopicNameEnforcedAaf(topicName)) {
+                               isAAFTopic = true;
+                               DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+                               String permission = aaf.aafPermissionString(topicName, action);
+                               if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
+                                       ErrorResponse errRes = errRespProvider.getAafAuthorizationError(permission, action);
+                                       LOG.info(errRes.toString());
+                                       throw new DMaaPAccessDeniedException(errRes);
+
+                               }
+                       } else if(metaTopic!=null && null != metaTopic.getOwner() && !metaTopic.getOwner().isEmpty()) {
+                               final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+                               if(SUBSCRIBE_ACTION.equals(action)) {
+                                       metaTopic.checkUserRead(user);
+                               } else if(PUBLISH_ACTION.equals(action)) {
+                                       metaTopic.checkUserWrite(user);
+                               }
+                       }
+               }
+               return isAAFTopic;
+       }
+
+       boolean isCadiEnabled() {
+               return Utils.isCadiEnabled();
+       }
+
+       void respondOkWithStream(DMaaPContext ctx, StreamWriter coes) throws IOException{
+               DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+               DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
+       }
+
+       private int getMessageLimit(HttpServletRequest request) {
+               return NumberUtils.toInt(request.getParameter("limit"), CambriaConstants.kNoLimit);
+       }
+
+       private int getMessageTimeout(HttpServletRequest request) {
+               String timeoutMsAsString = getPropertyFromAJSCmap(TIMEOUT_PROPERTY);
+               int defaultTimeoutMs = StringUtils.isNotEmpty(timeoutMsAsString) ? NumberUtils.toInt(timeoutMsAsString, CambriaConstants.kNoTimeout) :
+                       CambriaConstants.kNoTimeout;
+
+               String timeoutProperty = request.getParameter(TIMEOUT_PROPERTY);
+               return timeoutProperty != null ? NumberUtils.toInt(timeoutProperty, defaultTimeoutMs) : defaultTimeoutMs;
+       }
+
+       private boolean isPrettyPrintEnabled() {
+               return rrConvertor.convertToBooleanBroad(getPropertyFromAJSCmap("pretty"));
+       }
+
+       private boolean isMetaOffsetEnabled() {
+               return rrConvertor.convertToBooleanBroad(getPropertyFromAJSCmap( "meta"));
+       }
+
+       private boolean isTopicNameEnforcedAaf(String topicName) {
+               String topicNameStd = getPropertyFromAJSCmap("enforced.topic.name.AAF");
+               return StringUtils.isNotEmpty(topicNameStd) && topicName.startsWith(topicNameStd);
+       }
+
+       private boolean isCacheEnabled() {
+               String cachePropsSetting = getPropertyFromAJSCmap(ConsumerFactory.kSetting_EnableCache);
+               return StringUtils.isNotEmpty(cachePropsSetting) ? Boolean.parseBoolean(cachePropsSetting) : ConsumerFactory.kDefault_IsCacheEnabled;
+       }
+
+       private void verifyHostId() {
+               String lhostId = getPropertyFromAJSCmap("clusterhostid");
+               if (StringUtils.isEmpty(lhostId)) {
+                       try {
+                               InetAddress.getLocalHost().getCanonicalHostName();
+                       } catch (UnknownHostException e) {
+                               LOG.warn("Unknown Host Exception error occurred while getting getting hostid", e);
+                       }
+
+               }
+       }
+
+       private String getMetricTopicName() {
+               String metricTopicFromProps = getPropertyFromAJSCmap("metrics.send.cambria.topic");
+               return StringUtils.isNotEmpty(metricTopicFromProps) ? metricTopicFromProps : "msgrtr.apinode.metrics.dmaap";
+       }
+
+       /**
+        * @throws missingReqdSetting
+        * 
+        */
+       @Override
+       public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+                       final String requestTime) throws ConfigDbException, AccessDeniedException,
+                       CambriaApiException, IOException, missingReqdSetting {
+
+               final long startMs = System.currentTimeMillis();
+               String remoteHost = ctx.getRequest().getRemoteHost();
+               ErrorResponseProvider errRespProvider = new ErrorResponseProvider.Builder().withErrorMessages(errorMessages)
+                       .withTopic(topic).withRemoteHost(remoteHost).withPublisherIp(remoteHost)
+                       .withPublisherId(Utils.getUserApiKey(ctx.getRequest())).build();
+
+               validateIpBlacklist(errRespProvider, ctx);
+
+               final Topic metaTopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+               final boolean isAAFTopic = authorizeClientWhenNeeded(ctx, metaTopic, topic, errRespProvider, PUBLISH_ACTION);
+
+               final HttpServletRequest req = ctx.getRequest();
+               boolean chunked = isRequestedChunk(req);
+               String mediaType = getMediaType(req);
+               boolean transactionRequired = isTransactionIdRequired();
+
+               if (isAAFTopic || transactionRequired) {
+                       pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+               } else {
+                       pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+               }
+
+               final long endMs = System.currentTimeMillis();
+               final long totalMs = endMs - startMs;
+               LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic);
+       }
+
+       private boolean isRequestedChunk(HttpServletRequest request) {
+               return null != request.getHeader(TRANSFER_ENCODING) &&
+                       request.getHeader(TRANSFER_ENCODING).contains("chunked");
+       }
+
+       private String getMediaType(HttpServletRequest request) {
+               String mediaType = request.getContentType();
+               if (mediaType == null || mediaType.length() == 0) {
+                       return MimeTypes.kAppGenericBinary;
+               }
+               return mediaType.replace("; charset=UTF-8", "").trim();
+       }
+
+       private boolean isTransactionIdRequired() {
+               String transIdReqProperty = getPropertyFromAJSCmap("transidUEBtopicreqd");
+               return StringUtils.isNotEmpty(transIdReqProperty) && transIdReqProperty.equalsIgnoreCase("true");
+       }
+
+       /**
+        * 
+        * @param ctx
+        * @param topic
+        * @param msg
+        * @param defaultPartition
+        * @param chunked
+        * @param mediaType
+        * @throws ConfigDbException
+        * @throws AccessDeniedException
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        * @throws IOException
+        */
+       private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
+                       String mediaType)
+                       throws ConfigDbException, AccessDeniedException, CambriaApiException, IOException {
+               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+               // setup the event set
+               final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+               // start processing, building a batch to push to the backend
+               final long startMs = System.currentTimeMillis();
+               long count = 0;
+               long maxEventBatch = 1024L* 16;
+               String batchlen = getPropertyFromAJSCmap( BATCH_LENGTH);
+               if (null != batchlen && !batchlen.isEmpty())
+                       maxEventBatch = Long.parseLong(batchlen);
+               // long maxEventBatch =
+               
+               final LinkedList<message> batch = new LinkedList<>();
+               // final ArrayList<KeyedMessage<String, String>> kms = new
+
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<>();
+               try {
+                       // for each message...
+                       message m = null;
+                       while ((m = events.next()) != null) {
+                               // add the message to the batch
+                               batch.add(m);
+                               // final KeyedMessage<String, String> data = new
+                               // KeyedMessage<String, String>(topic, m.getKey(),
+
+                               // kms.add(data);
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+
+                               pms.add(data);
+                               // check if the batch is full
+                               final int sizeNow = batch.size();
+                               if (sizeNow > maxEventBatch) {
+                                       // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+
+                                       // kms.clear();
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       pms.clear();
+                                       batch.clear();
+                                       metricsSet.publishTick(sizeNow);
+                                       count += sizeNow;
+                               }
+                       }
+
+                       // send the pending batch
+                       final int sizeNow = batch.size();
+                       if (sizeNow > 0) {
+                               // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+
+                               // kms.clear();
+                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                               pms.clear();
+                               batch.clear();
+                               metricsSet.publishTick(sizeNow);
+                               count += sizeNow;
+                       }
+
+                       final long endMs = System.currentTimeMillis();
+                       final long totalMs = endMs - startMs;
+
+                       LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server "
+                                       + ctx.getRequest().getRemoteHost());
+
+                       // build a responseP
+                       final JSONObject response = new JSONObject();
+                       response.put("count", count);
+                       response.put("serverTimeMs", totalMs);
+                       respondOk(ctx, response);
+
+               } catch (Exception excp) {
+                       int status = HttpStatus.SC_NOT_FOUND;
+                       String errorMsg = null;
+                       if (excp instanceof CambriaApiException) {
+                               status = ((CambriaApiException) excp).getStatus();
+                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                               JSONObject errObject = new JSONObject(jsonTokener);
+                               errorMsg = (String) errObject.get("message");
+
+                       }
+                       ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                       errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+                                                       + "." + errorMsg,
+                                       null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+                                       null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               }
+       }
+
+       /**
+        *
+        * @param ctx
+        * @param inputStream
+        * @param topic
+        * @param partitionKey
+        * @param requestTime
+        * @param chunked
+        * @param mediaType
+        * @throws ConfigDbException
+        * @throws AccessDeniedException
+        * @throws TopicExistsException
+        * @throws IOException
+        * @throws CambriaApiException
+        */
+       private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+                       final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+                       throws ConfigDbException, AccessDeniedException, IOException, CambriaApiException {
+
+               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+               // setup the event set
+               final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+               // start processing, building a batch to push to the backend
+               final long startMs = System.currentTimeMillis();
+               long count = 0;
+               long maxEventBatch = 1024L * 16;
+               String evenlen = getPropertyFromAJSCmap( BATCH_LENGTH);
+               if (null != evenlen && !evenlen.isEmpty())
+                       maxEventBatch = Long.parseLong(evenlen);
+               // final long maxEventBatch =
+
+               final LinkedList<message> batch = new LinkedList<message>();
+               // final ArrayList<KeyedMessage<String, String>> kms = new
+
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+               message m = null;
+               int messageSequence = 1;
+               Long batchId = 1L;
+               final boolean transactionEnabled = true;
+               int publishBatchCount = 0;
+               SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+               // LOG.warn("Batch Start Id: " +
+       
+               try {
+                       // for each message...
+                       batchId = DMaaPContext.getBatchID();
+
+                       String responseTransactionId = null;
+
+                       while ((m = events.next()) != null) {
+
+                               // LOG.warn("Batch Start Id: " +
+                               
+
+                               addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+                                               transactionEnabled);
+                               messageSequence++;
+
+                       
+                               batch.add(m);
+
+                               responseTransactionId = m.getLogDetails().getTransactionId();
+
+                               //JSONObject jsonObject = new JSONObject();
+                               //jsonObject.put("msgWrapMR", m.getMessage());
+                               //jsonObject.put("transactionId", responseTransactionId);
+                               // final KeyedMessage<String, String> data = new
+                               // KeyedMessage<String, String>(topic, m.getKey(),
+                       
+                               
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+
+                               pms.add(data);
+                               // check if the batch is full
+                               final int sizeNow = batch.size();
+                               if (sizeNow >= maxEventBatch) {
+                                       String startTime = sdf.format(new Date());
+                                       LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+                                                       + batchId + "]");
+                                       try {
+                                               // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+                                       
+                                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                               // transactionLogs(batch);
+                                               for (message msg : batch) {
+                                                       LogDetails logDetails = msg.getLogDetails();
+                                                       LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+                                               }
+                                       } catch (Exception excp) {
+
+                                               int status = HttpStatus.SC_NOT_FOUND;
+                                               String errorMsg = null;
+                                               if (excp instanceof CambriaApiException) {
+                                                       status = ((CambriaApiException) excp).getStatus();
+                                                       JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                                                       JSONObject errObject = new JSONObject(jsonTokener);
+                                                       errorMsg = (String) errObject.get("message");
+                                               }
+                                               ErrorResponse errRes = new ErrorResponse(status,
+                                                               DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                                               "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                                               + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                                               null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                                               ctx.getRequest().getRemoteHost(), null, null);
+                                               LOG.info(errRes.toString());
+                                               throw new CambriaApiException(errRes);
+                                       }
+                                       pms.clear();
+                                       batch.clear();
+                                       metricsSet.publishTick(sizeNow);
+                                       publishBatchCount = sizeNow;
+                                       count += sizeNow;
+                                       
+                                       String endTime = sdf.format(new Date());
+                                       LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
+                                                       + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
+                                                       + ",Batch End Time=" + endTime + "]");
+                                       batchId = DMaaPContext.getBatchID();
+                               }
+                       }
+
+                       // send the pending batch
+                       final int sizeNow = batch.size();
+                       if (sizeNow > 0) {
+                               String startTime = sdf.format(new Date());
+                               LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+                                               + batchId + "]");
+                               try {
+                                       // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
+                                       
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       
+                                       for (message msg : batch) {
+                                               LogDetails logDetails = msg.getLogDetails();
+                                               LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+                                       }
+                               } catch (Exception excp) {
+                                       int status = HttpStatus.SC_NOT_FOUND;
+                                       String errorMsg = null;
+                                       if (excp instanceof CambriaApiException) {
+                                               status = ((CambriaApiException) excp).getStatus();
+                                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                                               JSONObject errObject = new JSONObject(jsonTokener);
+                                               errorMsg = (String) errObject.get("message");
+                                       }
+
+                                       ErrorResponse errRes = new ErrorResponse(status,
+                                                       DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                                       "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                                       + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                                       ctx.getRequest().getRemoteHost(), null, null);
+                                       LOG.info(errRes.toString());
+                                       throw new CambriaApiException(errRes);
+                               }
+                               pms.clear();
+                               metricsSet.publishTick(sizeNow);
+                               count += sizeNow;
+                       
+                               String endTime = sdf.format(new Date());
+                               publishBatchCount = sizeNow;
+                               LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
+                                               + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
+                                               + endTime + "]");
+                       }
+
+                       final long endMs = System.currentTimeMillis();
+                       final long totalMs = endMs - startMs;
+
+                       LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic);
+
+                       if (null != responseTransactionId) {
+                               ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
+                       }
+
+                       // build a response
+                       final JSONObject response = new JSONObject();
+                       response.put("count", count);
+                       response.put("transactionId", responseTransactionId);
+                       response.put("serverTimeMs", totalMs);
+                       respondOk(ctx, response);
+
+               } catch (Exception excp) {
+                       int status = HttpStatus.SC_NOT_FOUND;
+                       String errorMsg = null;
+                       if (excp instanceof CambriaApiException) {
+                               status = ((CambriaApiException) excp).getStatus();
+                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                               JSONObject errObject = new JSONObject(jsonTokener);
+                               errorMsg = (String) errObject.get("message");
+                       }
+
+                       ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                       "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                       + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+       }
+
+       /**
+        * 
+        * @param msg
+        * @param topic
+        * @param request
+        * @param messageCreationTime
+        * @param messageSequence
+        * @param batchId
+        * @param transactionEnabled
+        */
+       private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+                       final String messageCreationTime, final int messageSequence, final Long batchId,
+                       final boolean transactionEnabled) {
+               LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+                               transactionEnabled);
+               logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+               msg.setTransactionEnabled(transactionEnabled);
+               msg.setLogDetails(logDetails);
+       }
+
+       void respondOk(DMaaPContext ctx, JSONObject response) throws IOException {
+               DMaaPResponseBuilder.respondOk(ctx, response);
+       }
+
+       /**
+        * 
+        * @author anowarul.islam
+        *
+        */
+       private static class LogWrap {
+               private final String fId;
+
+               /**
+                * constructor initialization
+                * 
+                * @param topic
+                * @param cgroup
+                * @param cid
+                */
+               public LogWrap(String topic, String cgroup, String cid) {
+                       fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
+               }
+
+               /**
+                * 
+                * @param msg
+                */
+               public void info(String msg) {
+                       LOG.info(fId + msg);
+               }
+
+               /**
+                * 
+                * @param msg
+                * @param t
+                */
+               public void warn(String msg, Exception t) {
+                       LOG.warn(fId + msg, t);
+               }
+
+       }
+
+       public boolean isTransEnabled() {
+               String istransidUEBtopicreqd = getPropertyFromAJSCmap("transidUEBtopicreqd");
+               boolean istransidreqd = false;
+               if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"))) {
+                       istransidreqd = true;
+               }
+
+               return istransidreqd;
+
+       }
+
+       private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+                       final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+               LogDetails logDetails = new LogDetails();
+               logDetails.setTopicId(topicName);
+               logDetails.setMessageTimestamp(messageTimestamp);
+               logDetails.setPublisherId(Utils.getUserApiKey(request));
+               logDetails.setPublisherIp(request.getRemoteHost());
+               logDetails.setMessageBatchId(batchId);
+               logDetails.setMessageSequence(String.valueOf(messageSequence));
+               logDetails.setTransactionEnabled(transactionEnabled);
+               logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+               logDetails.setServerIp(request.getLocalAddr());
+               return logDetails;
+       }
+
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java
new file mode 100644 (file)
index 0000000..bfa48cf
--- /dev/null
@@ -0,0 +1,596 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+import org.apache.http.HttpStatus;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet;
+import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream;
+import org.onap.dmaap.dmf.mr.service.MMService;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Service;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Context;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.LinkedList;
+
+
+@Service
+public class MMServiceImpl implements MMService {
+       private static final String BATCH_LENGTH = "event.batch.length";
+       private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+       //private static final Logger LOG = Logger.getLogger(MMServiceImpl.class);
+       private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class);
+       @Autowired
+       private DMaaPErrorMessages errorMessages;
+
+       @Autowired
+       @Qualifier("configurationReader")
+       private ConfigurationReader configReader;
+
+       // HttpServletRequest object
+       @Context
+       private HttpServletRequest request;
+
+       // HttpServletResponse object
+       @Context
+       private HttpServletResponse response;
+
+       @Override
+       public void addWhiteList() {
+
+       }
+
+       @Override
+       public void removeWhiteList() {
+
+       }
+
+       @Override
+       public void listWhiteList() {
+
+       }
+
+       @Override
+       public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+                       throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+                       CambriaApiException, IOException {
+
+               
+               final HttpServletRequest req = ctx.getRequest();
+               ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+               // was this host blacklisted?
+               final String remoteAddr = Utils.getRemoteAddress(ctx);
+               
+               if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                       "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+
+               int limit = CambriaConstants.kNoLimit;
+
+               if (req.getParameter("limit") != null) {
+                       limit = Integer.parseInt(req.getParameter("limit"));
+               }
+               limit = 1;
+               
+               int timeoutMs = CambriaConstants.kNoTimeout;
+               String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
+               if (strtimeoutMS != null)
+                       timeoutMs = Integer.parseInt(strtimeoutMS);
+               // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
+               
+               if (req.getParameter("timeout") != null) {
+                       timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+               }
+
+               // By default no filter is applied if filter is not passed as a
+               // parameter in the request URI
+               String topicFilter = CambriaConstants.kNoFilter;
+               if (null != req.getParameter("filter")) {
+                       topicFilter = req.getParameter("filter");
+               }
+               // pretty to print the messaages in new line
+               String prettyval = "0";
+               String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
+               if (null != strPretty)
+                       prettyval = strPretty;
+
+               String metaval = "0";
+               String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
+               if (null != strmeta)
+                       metaval = strmeta;
+
+               final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
+               // withMeta to print offset along with message
+               final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
+
+               // is this user allowed to read this topic?
+               //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+               final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+               if (metatopic == null) {
+                       // no such topic.
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+                                       DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+                                       errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
+                                       topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+               //String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
+               /*
+                * if (null==metricTopicname)
+                * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null)
+                * if(null==ctx.getRequest().getHeader("Authorization")&&
+                * !topic.equalsIgnoreCase(metricTopicname)) { if (null !=
+                * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check
+                * permissions metatopic.checkUserRead(user); } }
+                */
+
+               Consumer c = null;
+               try {
+                       final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+                       c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost());
+
+                       final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
+                                       .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
+                       coes.setDmaapContext(ctx);
+                       coes.setTopic(metatopic);
+
+                       DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+                       try {
+                               coes.write(baos);
+                       } catch (Exception ex) {
+
+                       }
+
+                       c.commitOffsets();
+                       final int sent = coes.getSentCount();
+
+                       metricsSet.consumeTick(sent);
+
+               } catch (UnavailableException excp) {
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+                                       DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+                                       errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+                                       null, null, clientId, ctx.getRequest().getRemoteHost());
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               } catch (CambriaApiException excp) {
+
+                       throw excp;
+               } catch (Exception excp) {
+
+                       ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+                                       DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+                                       "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
+                                       Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               } finally {
+
+                       boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
+                       String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                                       ConsumerFactory.kSetting_EnableCache);
+                       if (null != strkSetting_EnableCache)
+                               kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+
+                       if (!kSetting_EnableCache && (c != null)) {
+                               c.close();
+
+                       }
+               }
+               return baos.toString();
+       }
+
+       @Override
+       public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+                       final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+                                       CambriaApiException, IOException, missingReqdSetting {
+
+               //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+               //final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+               final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+               if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                       "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+
+               String topicNameStd = null;
+
+               topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
+                               "enforced.topic.name.AAF");
+               String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+                               "metrics.send.cambria.topic");
+               if (null == metricTopicname)
+                       metricTopicname = "msgrtr.apinode.metrics.dmaap";
+               boolean topicNameEnforced = false;
+               if (null != topicNameStd && topic.startsWith(topicNameStd)) {
+                       topicNameEnforced = true;
+               }
+
+               final HttpServletRequest req = ctx.getRequest();
+
+               boolean chunked = false;
+               if (null != req.getHeader(TRANSFER_ENCODING)) {
+                       chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+               }
+
+               String mediaType = req.getContentType();
+               if (mediaType == null || mediaType.length() == 0) {
+                       mediaType = MimeTypes.kAppGenericBinary;
+               }
+
+               if (mediaType.contains("charset=UTF-8")) {
+                       mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+               }
+
+               if (!topic.equalsIgnoreCase(metricTopicname)) {
+                       pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+               } else {
+                       pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+               }
+       }
+
+       private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+                       final String messageCreationTime, final int messageSequence, final Long batchId,
+                       final boolean transactionEnabled) {
+               LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+                               transactionEnabled);
+               logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+               msg.setTransactionEnabled(transactionEnabled);
+               msg.setLogDetails(logDetails);
+       }
+
+       private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+                       final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+               LogDetails logDetails = new LogDetails();
+               logDetails.setTopicId(topicName);
+               logDetails.setMessageTimestamp(messageTimestamp);
+               logDetails.setPublisherId(Utils.getUserApiKey(request));
+               logDetails.setPublisherIp(request.getRemoteHost());
+               logDetails.setMessageBatchId(batchId);
+               logDetails.setMessageSequence(String.valueOf(messageSequence));
+               logDetails.setTransactionEnabled(transactionEnabled);
+               logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+               logDetails.setServerIp(request.getLocalAddr());
+               return logDetails;
+       }
+
+       private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
+                       String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+                                       CambriaApiException, IOException {
+               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+               // setup the event set
+               final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+               // start processing, building a batch to push to the backend
+               final long startMs = System.currentTimeMillis();
+               long count = 0;
+
+               long maxEventBatch = 1024L * 16;
+               String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+               if (null != batchlen)
+                       maxEventBatch = Long.parseLong(batchlen);
+
+               // long maxEventBatch =
+               // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+               final LinkedList<message> batch = new LinkedList<message>();
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+               //final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+               try {
+                       // for each message...
+                       message m = null;
+                       while ((m = events.next()) != null) {
+                               // add the message to the batch
+                               batch.add(m);
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+                               // check if the batch is full
+                               final int sizeNow = batch.size();
+                               if (sizeNow > maxEventBatch) {
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       pms.clear();
+                                       batch.clear();
+                                       metricsSet.publishTick(sizeNow);
+                                       count += sizeNow;
+                               }
+                       }
+
+                       // send the pending batch
+                       final int sizeNow = batch.size();
+                       if (sizeNow > 0) {
+                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                               pms.clear();
+                               batch.clear();
+                               metricsSet.publishTick(sizeNow);
+                               count += sizeNow;
+                       }
+
+                       final long endMs = System.currentTimeMillis();
+                       final long totalMs = endMs - startMs;
+
+                       LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+                       // build a responseP
+                       final JSONObject response = new JSONObject();
+                       response.put("count", count);
+                       response.put("serverTimeMs", totalMs);
+                       // DMaaPResponseBuilder.respondOk(ctx, response);
+
+               } catch (Exception excp) {
+
+                       int status = HttpStatus.SC_NOT_FOUND;
+                       String errorMsg = null;
+                       if (excp.getClass().toString().contains("CambriaApiException")) {
+                               status = ((CambriaApiException) excp).getStatus();
+                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                               JSONObject errObject = new JSONObject(jsonTokener);
+                               errorMsg = (String) errObject.get("message");
+
+                       }
+                       ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                       errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+                                                       + "." + errorMsg,
+                                       null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+                                       null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               }
+       }
+
+       private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+                       final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+                                       throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
+                                       CambriaApiException {
+
+               final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+               // setup the event set
+               final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+               // start processing, building a batch to push to the backend
+               final long startMs = System.currentTimeMillis();
+               long count = 0;
+        long maxEventBatch = 1024L * 16L;
+               String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+               if (null != evenlen)
+                       maxEventBatch = Long.parseLong(evenlen);
+
+               final LinkedList<message> batch = new LinkedList<message>();
+               final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+
+               message m = null;
+               int messageSequence = 1;
+               Long batchId = 1L;
+               final boolean transactionEnabled = true;
+               int publishBatchCount = 0;
+               SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+               // LOG.warn("Batch Start Id: " +
+               // Utils.getFromattedBatchSequenceId(batchId));
+               try {
+                       // for each message...
+                       batchId = DMaaPContext.getBatchID();
+
+                       String responseTransactionId = null;
+
+                       while ((m = events.next()) != null) {
+
+                               // LOG.warn("Batch Start Id: " +
+                               // Utils.getFromattedBatchSequenceId(batchId));
+
+                               addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+                                               transactionEnabled);
+                               messageSequence++;
+
+                               // add the message to the batch
+                               batch.add(m);
+
+                               responseTransactionId = m.getLogDetails().getTransactionId();
+
+                               JSONObject jsonObject = new JSONObject();
+                               jsonObject.put("message", m.getMessage());
+                               jsonObject.put("transactionId", responseTransactionId);
+                               final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+                                               m.getMessage());
+                               pms.add(data);
+
+                               // check if the batch is full
+                               final int sizeNow = batch.size();
+                               if (sizeNow >= maxEventBatch) {
+                                       String startTime = sdf.format(new Date());
+                                       LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+                                                       + batchId + "]");
+                                       try {
+                                               ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                               // transactionLogs(batch);
+                                               for (message msg : batch) {
+                                                       LogDetails logDetails = msg.getLogDetails();
+                                                       LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+                                               }
+                                       } catch (Exception excp) {
+
+                                               int status = HttpStatus.SC_NOT_FOUND;
+                                               String errorMsg = null;
+                                               if (excp.getClass().toString().contains("CambriaApiException")) {
+                                                       status = ((CambriaApiException) excp).getStatus();
+                                                       JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                                                       JSONObject errObject = new JSONObject(jsonTokener);
+                                                       errorMsg = (String) errObject.get("message");
+                                               }
+                                               ErrorResponse errRes = new ErrorResponse(status,
+                                                               DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                                               "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                                               + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                                               null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                                               ctx.getRequest().getRemoteHost(), null, null);
+                                               LOG.info(errRes.toString());
+                                               throw new CambriaApiException(errRes);
+                                       }
+                                       pms.clear();
+                                       batch.clear();
+                                       metricsSet.publishTick(sizeNow);
+                                       publishBatchCount = sizeNow;
+                                       count += sizeNow;
+                                       // batchId++;
+                                       String endTime = sdf.format(new Date());
+                                       LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
+                                                       + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
+                                                       + ",Batch End Time=" + endTime + "]");
+                                       batchId = DMaaPContext.getBatchID();
+                               }
+                       }
+
+                       // send the pending batch
+                       final int sizeNow = batch.size();
+                       if (sizeNow > 0) {
+                               String startTime = sdf.format(new Date());
+                               LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+                                               + batchId + "]");
+                               try {
+                                       ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+                                       // transactionLogs(batch);
+                                       for (message msg : batch) {
+                                               LogDetails logDetails = msg.getLogDetails();
+                                               LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+                                       }
+                               } catch (Exception excp) {
+                                       int status = HttpStatus.SC_NOT_FOUND;
+                                       String errorMsg = null;
+                                       if (excp.getClass().toString().contains("CambriaApiException")) {
+                                               status = ((CambriaApiException) excp).getStatus();
+                                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                                               JSONObject errObject = new JSONObject(jsonTokener);
+                                               errorMsg = (String) errObject.get("message");
+                                       }
+
+                                       ErrorResponse errRes = new ErrorResponse(status,
+                                                       DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                                       "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                                       + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                                       ctx.getRequest().getRemoteHost(), null, null);
+                                       LOG.info(errRes.toString());
+                                       throw new CambriaApiException(errRes);
+                               }
+                               pms.clear();
+                               metricsSet.publishTick(sizeNow);
+                               count += sizeNow;
+                               // batchId++;
+                               String endTime = sdf.format(new Date());
+                               publishBatchCount = sizeNow;
+                               LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
+                                               + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
+                                               + endTime + "]");
+                       }
+
+                       final long endMs = System.currentTimeMillis();
+                       final long totalMs = endMs - startMs;
+
+                       LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+                       // build a response
+                       final JSONObject response = new JSONObject();
+                       response.put("count", count);
+                       response.put("serverTimeMs", totalMs);
+
+               } catch (Exception excp) {
+                       int status = HttpStatus.SC_NOT_FOUND;
+                       String errorMsg = null;
+                       if (excp.getClass().toString().contains("CambriaApiException")) {
+                               status = ((CambriaApiException) excp).getStatus();
+                               JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+                               JSONObject errObject = new JSONObject(jsonTokener);
+                               errorMsg = (String) errObject.get("message");
+                       }
+
+                       ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+                                       "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+                                                       + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+                                       null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+                                       ctx.getRequest().getRemoteHost(), null, null);
+                       LOG.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               }
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java
new file mode 100644 (file)
index 0000000..3774a47
--- /dev/null
@@ -0,0 +1,114 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.metrics.CdmMeasuredItem;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.service.MetricsService;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+
+/**
+ * 
+ * 
+ * This will provide all the generated metrics details also it can provide the
+ * get metrics details
+ * 
+ * 
+ * @author nilanjana.maity
+ *
+ *
+ */
+@Component
+public class MetricsServiceImpl implements MetricsService {
+
+       
+       private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class);
+       /**
+        * 
+        * 
+        * @param ctx
+        * @throws IOException
+        * 
+        * 
+        * get Metric details
+        * 
+        */
+       @Override
+       
+       public void get(DMaaPContext ctx) throws IOException {
+               LOG.info("Inside  : MetricsServiceImpl : get()");
+               final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
+               DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+               final JSONObject result = metrics.toJson();
+               DMaaPResponseBuilder.respondOk(ctx, result);
+               LOG.info("============ Metrics generated : " + result.toString() + "=================");
+
+       }
+
+
+       @Override
+       /**
+        * 
+        * get Metric by name
+        * 
+        * 
+        * @param ctx
+        * @param name
+        * @throws IOException
+        * @throws CambriaApiException
+        * 
+        * 
+        */
+       public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException {
+               LOG.info("Inside  : MetricsServiceImpl : getMetricByName()");
+               final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
+
+               final CdmMeasuredItem item = metrics.getItem(name);
+               /**
+                * check if item is null
+                */
+               if (item == null) {
+                       throw new CambriaApiException(404, "No metric named [" + name + "].");
+               }
+
+               final JSONObject entry = new JSONObject();
+               entry.put("summary", item.summarize());
+               entry.put("raw", item.getRawValueString());
+
+               DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+               final JSONObject result = new JSONObject();
+               result.put(name, entry);
+
+               DMaaPResponseBuilder.respondOk(ctx, result);
+               LOG.info("============ Metrics generated : " + entry.toString() + "=================");
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java
new file mode 100644 (file)
index 0000000..2235098
--- /dev/null
@@ -0,0 +1,637 @@
+/*
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Copyright (C) 2019 Nokia Intellectual Property. All rights reserved.
+ * =================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ */
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.ajsc.beans.PropertiesMapBean;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import joptsimple.internal.Strings;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.math.NumberUtils;
+import org.apache.http.HttpStatus;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.dmf.mr.metabroker.Broker1;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.TopicService;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.security.Principal;
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+@Service
+public class TopicServiceImpl implements TopicService {
+
+       private static final String TOPIC_CREATE_OP = "create";
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
+       @Autowired
+       private DMaaPErrorMessages errorMessages;
+
+       public DMaaPErrorMessages getErrorMessages() {
+               return errorMessages;
+       }
+
+       public void setErrorMessages(DMaaPErrorMessages errorMessages) {
+               this.errorMessages = errorMessages;
+       }
+
+
+  String getPropertyFromAJSCbean(String propertyKey) {
+               return PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, propertyKey);
+       }
+
+       String getPropertyFromAJSCmap(String propertyKey) {
+               return AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, propertyKey);
+       }
+
+       NsaApiKey getDmaapAuthenticatedUser(DMaaPContext dmaapContext) {
+               return DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+       }
+
+       void respondOk(DMaaPContext context, String msg) {
+               DMaaPResponseBuilder.respondOkWithHtml(context, msg);
+       }
+
+       void respondOk(DMaaPContext context, JSONObject json) throws IOException {
+               DMaaPResponseBuilder.respondOk(context, json);
+       }
+
+       boolean isCadiEnabled() {
+               return Utils.isCadiEnabled();
+       }
+       /**
+        * @param dmaapContext
+        * @throws JSONException
+        * @throws ConfigDbException
+        * @throws IOException
+        * 
+        */
+       @Override
+       public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
+               LOGGER.info("Fetching list of all the topics.");
+               JSONObject json = new JSONObject();
+
+               JSONArray topicsList = new JSONArray();
+
+               for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
+                       topicsList.put(topic.getName());
+               }
+
+               json.put("topics", topicsList);
+
+               LOGGER.info("Returning list of all the topics.");
+               respondOk(dmaapContext, json);
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @throws JSONException
+        * @throws ConfigDbException
+        * @throws IOException
+        * 
+        */
+       public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
+
+               LOGGER.info("Fetching list of all the topics.");
+               JSONObject json = new JSONObject();
+
+               JSONArray topicsList = new JSONArray();
+
+               for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
+                       JSONObject obj = new JSONObject();
+                       obj.put("topicName", topic.getName());
+                       
+                       obj.put("owner", topic.getOwner());
+                       obj.put("txenabled", topic.isTransactionEnabled());
+                       topicsList.put(obj);
+               }
+
+               json.put("topics", topicsList);
+
+               LOGGER.info("Returning list of all the topics.");
+               respondOk(dmaapContext, json);
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       @Override
+       public void getTopic(DMaaPContext dmaapContext, String topicName)
+                       throws ConfigDbException, IOException, TopicExistsException {
+
+               LOGGER.info("Fetching details of topic " + topicName);
+               Topic t = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (null == t) {
+                       LOGGER.error("Topic [" + topicName + "] does not exist.");
+                       throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
+               }
+
+               JSONObject o = new JSONObject();
+               o.put("name", t.getName());
+               o.put("description", t.getDescription());
+
+               if (null != t.getOwners())
+                       o.put("owner", t.getOwners().iterator().next());
+               if (null != t.getReaderAcl())
+                       o.put("readerAcl", aclToJson(t.getReaderAcl()));
+               if (null != t.getWriterAcl())
+                       o.put("writerAcl", aclToJson(t.getWriterAcl()));
+
+               LOGGER.info("Returning details of topic " + topicName);
+               respondOk(dmaapContext, o);
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicBean
+        * @throws CambriaApiException
+        * @throws AccessDeniedException
+        * @throws IOException
+        * @throws TopicExistsException
+        * @throws JSONException
+        * 
+        * 
+        * 
+        */
+       @Override
+       public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean) throws CambriaApiException, IOException {
+               String topicName = topicBean.getTopicName();
+               LOGGER.info("Creating topic {}",topicName);
+               String key = authorizeClient(dmaapContext, topicName, TOPIC_CREATE_OP);
+
+               try {
+                       final int partitions = getValueOrDefault(topicBean.getPartitionCount(), "default.partitions");
+                       final int replicas = getValueOrDefault(topicBean.getReplicationCount(), "default.replicas");
+
+                       final Topic t = getMetaBroker(dmaapContext).createTopic(topicName, topicBean.getTopicDescription(),
+                               key, partitions, replicas, topicBean.isTransactionEnabled());
+
+                       LOGGER.info("Topic {} created successfully. Sending response", topicName);
+                       respondOk(dmaapContext, topicToJson(t));
+               } catch (JSONException ex) {
+
+                       LOGGER.error("Failed to create topic "+ topicName +". Couldn't parse JSON data.", ex);
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+                                       DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
+                       LOGGER.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+
+               } catch (ConfigDbException ex) {
+
+                       LOGGER.error("Failed to create topic "+ topicName +".  Config DB Exception", ex);
+                       ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+                                       DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
+                       LOGGER.info(errRes.toString());
+                       throw new CambriaApiException(errRes);
+               } catch (Broker1.TopicExistsException ex) {
+                       LOGGER.error( "Failed to create topic "+ topicName +".  Topic already exists.",ex);
+               }
+       }
+
+       private String authorizeClient(DMaaPContext dmaapContext, String topicName, String operation) throws DMaaPAccessDeniedException {
+               String clientId = Strings.EMPTY;
+               if(isCadiEnabled() && isTopicWithEnforcedAuthorization(topicName)) {
+                       LOGGER.info("Performing AAF authorization for topic {} creation.", topicName);
+                       String permission = buildPermission(topicName, operation);
+                       DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+                       clientId = getAAFclientId(dmaapContext.getRequest());
+
+                       if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
+                               LOGGER.error("Failed to {} topic {}. Authorization failed for client {} and permission {}",
+                                       operation, topicName, clientId, permission);
+                               throw new DMaaPAccessDeniedException(new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+                                       DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+                                       "Failed to "+ operation +" topic: Access Denied. User does not have permission to create topic with perm " + permission));
+                       }
+               } else if(operation.equals(TOPIC_CREATE_OP)){
+                       final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext);
+                       clientId = (user != null) ? user.getKey() : Strings.EMPTY;
+               }
+               return clientId;
+       }
+
+       private String getAAFclientId(HttpServletRequest request) {
+               Principal principal = request.getUserPrincipal();
+               if (principal !=null) {
+                       return principal.getName();
+               } else {
+                       LOGGER.warn("Performing AAF authorization but user has not been provided in request.");
+                       return null;
+               }
+       }
+
+       private boolean isTopicWithEnforcedAuthorization(String topicName) {
+               String enfTopicNamespace = getPropertyFromAJSCbean("enforced.topic.name.AAF");
+               return enfTopicNamespace != null && topicName.startsWith(enfTopicNamespace);
+       }
+
+       int getValueOrDefault(int value, String defaultProperty) {
+               int returnValue = value;
+               if (returnValue <= 0) {
+                       String defaultValue = getPropertyFromAJSCmap(defaultProperty);
+                       returnValue = StringUtils.isNotEmpty(defaultValue) ? NumberUtils.toInt(defaultValue) : 1;
+                       returnValue = (returnValue <= 0) ? 1 : returnValue;
+               }
+               return returnValue;
+       }
+
+       private String buildPermission(String topicName, String operation) {
+               String nameSpace = (topicName.indexOf('.') > 1) ?
+                       topicName.substring(0, topicName.lastIndexOf('.')) : "";
+
+               String mrFactoryValue = getPropertyFromAJSCmap("msgRtr.topicfactory.aaf");
+               return mrFactoryValue + nameSpace + "|" + operation;
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        * @throws CambriaApiException
+        * @throws AccessDeniedException
+        */
+       @Override
+       public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException,
+                       CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+
+               LOGGER.info(" Deleting topic " + topicName);
+               authorizeClient(dmaapContext, topicName, "destroy");
+
+               final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+               if (topic == null) {
+                       LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+                       throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               // metabroker.deleteTopic(topicName);
+
+               LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
+               respondOk(dmaapContext, "Topic [" + topicName + "] deleted successfully");
+       }
+
+       /**
+        * 
+        * @param dmaapContext
+        * @return
+        */
+       DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
+               return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        * 
+        */
+       @Override
+       public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
+                       throws ConfigDbException, IOException, TopicExistsException {
+               LOGGER.info("Retrieving list of all the publishers for topic " + topicName);
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (topic == null) {
+                       LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
+                       throw new TopicExistsException(
+                                       "Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               final NsaAcl acl = topic.getWriterAcl();
+
+               LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response.");
+               respondOk(dmaapContext, aclToJson(acl));
+
+       }
+
+       /**
+        * 
+        * @param acl
+        * @return
+        */
+       private static JSONObject aclToJson(NsaAcl acl) {
+               final JSONObject o = new JSONObject();
+               if (acl == null) {
+                       o.put("enabled", false);
+                       o.put("users", new JSONArray());
+               } else {
+                       o.put("enabled", acl.isActive());
+
+                       final JSONArray a = new JSONArray();
+                       for (String user : acl.getUsers()) {
+                               a.put(user);
+                       }
+                       o.put("users", a);
+               }
+               return o;
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        */
+       @Override
+       public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
+                       throws IOException, ConfigDbException, TopicExistsException {
+               LOGGER.info("Retrieving list of all the consumers for topic " + topicName);
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (topic == null) {
+                       LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
+                       throw new TopicExistsException(
+                                       "Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               final NsaAcl acl = topic.getReaderAcl();
+
+               LOGGER.info("Returning list of all the consumers for topic " + topicName + ". Sending response.");
+               respondOk(dmaapContext, aclToJson(acl));
+
+       }
+
+       /**
+        * 
+        * @param t
+        * @return
+        */
+       static JSONObject topicToJson(Topic t) {
+               final JSONObject o = new JSONObject();
+
+               o.put("name", t.getName());
+               o.put("description", t.getDescription());
+               o.put("owner", t.getOwner());
+               o.put("readerAcl", aclToJson(t.getReaderAcl()));
+               o.put("writerAcl", aclToJson(t.getWriterAcl()));
+
+               return o;
+       }
+
+       /**
+        * @param dmaapContext
+        *                      @param topicName @param producerId @throws
+        *            ConfigDbException @throws IOException @throws
+        *            TopicExistsException @throws AccessDeniedException @throws
+        * 
+        */
+       @Override
+       public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException {
+
+               LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
+               final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext);
+
+               
+               //
+               // LOGGER.info("Authenticating the user, as ACL authentication is not
+               
+               //// String permission =
+               
+               //
+               
+               
+               
+               // {
+               // LOGGER.error("Failed to permit write access to producer [" +
+               // producerId + "] for topic " + topicName
+               
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Grant publish permissions>
+               
+               
+               
+               // }
+               // }
+
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (null == topic) {
+                       LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName
+                                       + "] does not exist.");
+                       throw new TopicExistsException("Failed to permit write access to producer [" + producerId
+                                       + "] for topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               topic.permitWritesFromUser(producerId, user);
+
+               LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName
+                               + "]. Sending response.");
+               respondOk(dmaapContext, "Write access has been granted to publisher.");
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @param producerId
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        * @throws AccessDeniedException
+        * @throws DMaaPAccessDeniedException
+        * 
+        */
+       @Override
+       public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+                       DMaaPAccessDeniedException {
+
+               LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
+               final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext);
+               
+               //
+               //// String permission =
+               
+               // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to revoke write access to producer [" +
+               // producerId + "] for topic " + topicName
+               
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Revoke publish permissions>
+               
+               
+               // throw new DMaaPAccessDeniedException(errRes);
+               //
+       
+               // }
+
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (null == topic) {
+                       LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName
+                                       + "] does not exist.");
+                       throw new TopicExistsException("Failed to revoke write access to producer [" + producerId
+                                       + "] for topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               topic.denyWritesFromUser(producerId, user);
+
+               LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName
+                               + "]. Sending response.");
+               respondOk(dmaapContext, "Write access has been revoked for publisher.");
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @param consumerId
+        * @throws DMaaPAccessDeniedException
+        */
+       @Override
+       public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+                       DMaaPAccessDeniedException {
+
+               LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
+               final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext);
+               
+               //
+               //// String permission =
+               
+               
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to permit read access to consumer [" +
+               // consumerId + "] for topic " + topicName
+               
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Grant consume permissions>
+               
+               
+               
+               // }
+               // }
+
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (null == topic) {
+                       LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
+                                       + "] does not exist.");
+                       throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
+                                       + "] for topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               topic.permitReadsByUser(consumerId, user);
+
+               LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName
+                               + "]. Sending response.");
+               respondOk(dmaapContext,
+                               "Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "].");
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @param consumerId
+        * @throws DMaaPAccessDeniedException
+        */
+       @Override
+       public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+                       throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+                       DMaaPAccessDeniedException {
+
+               LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
+               final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext);
+               
+               //// String permission =
+               
+               
+               // String permission = aaf.aafPermissionString(topicName, "manage");
+               // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+               // {
+               // LOGGER.error("Failed to revoke read access to consumer [" +
+               // consumerId + "] for topic " + topicName
+               
+               // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+               // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+               // errorMessages.getNotPermitted1()+" <Grant consume permissions>
+               
+               
+               // throw new DMaaPAccessDeniedException(errRes);
+               // }
+               //
+               //
+       
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (null == topic) {
+                       LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
+                                       + "] does not exist.");
+                       throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
+                                       + "] for topic. Topic [" + topicName + "] does not exist.");
+               }
+
+               topic.denyReadsByUser(consumerId, user);
+
+               LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName
+                               + "]. Sending response.");
+               respondOk(dmaapContext,
+                               "Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "].");
+
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java
new file mode 100644 (file)
index 0000000..973a9eb
--- /dev/null
@@ -0,0 +1,99 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.configs.ConfigDbException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.service.TransactionService;
+import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+
+/**
+ * Once the transaction rest gateway will be using that time it will provide all
+ * the transaction details like fetching all the transactional objects or get
+ * any particular transaction object details
+ * 
+ * @author nilanjana.maity
+ *
+ */
+@Service
+public class TransactionServiceImpl implements TransactionService {
+
+       @Override
+       public void checkTransaction(TransactionObj trnObj) {
+               /* Need to implement the method */
+       }
+
+       @Override
+       public void getAllTransactionObjs(DMaaPContext dmaapContext)
+                       throws ConfigDbException, IOException {
+
+               /*
+               
+                * 
+                * LOG.info("configReader : "+configReader.toString());
+                * 
+                * final JSONObject result = new JSONObject (); final JSONArray
+                * transactionIds = new JSONArray (); result.put ( "transactionIds",
+                * transactionIds );
+                * 
+                * DMaaPTransactionObjDB<DMaaPTransactionObj> transDb =
+                * configReader.getfTranDb();
+                * 
+                * for (String transactionId : transDb.loadAllTransactionObjs()) {
+                * transactionIds.put (transactionId); } LOG.info(
+                * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : "
+                * + transactionIds.toString()+"===========");
+                * DMaaPResponseBuilder.respondOk(dmaapContext, result);
+                */
+       }
+
+       @Override
+       public void getTransactionObj(DMaaPContext dmaapContext,
+                       String transactionId) throws ConfigDbException, JSONException,
+                       IOException {
+
+               /*
+                
+                * 
+                * ConfigurationReader configReader = dmaapContext.getConfigReader();
+                * 
+                * DMaaPTransactionObj trnObj;
+                * 
+                * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId);
+                * 
+                * 
+                * if (null != trnObj) { trnObj.serialize(); JSONObject result =
+                * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext,
+                * result);
+                * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+
+                * result.toString()+"==========="); return; }
+                * 
+                * } LOG.info(
+                * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. "
+                * +"===========");
+                */
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java
new file mode 100644 (file)
index 0000000..e9ca969
--- /dev/null
@@ -0,0 +1,209 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import org.apache.kafka.common.errors.TopicExistsException;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.service.UIService;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * @author muzainulhaque.qazi
+ *
+ */
+@Service
+public class UIServiceImpl implements UIService {
+
+       
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class);
+       /**
+        * Returning template of hello page
+        * @param dmaapContext
+        * @throws IOException
+        */
+       @Override
+       public void hello(DMaaPContext dmaapContext) throws IOException {
+               LOGGER.info("Returning template of hello page.");
+               DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html");
+       }
+
+       /**
+        * Fetching list of all api keys and returning in a templated form for display.
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       @Override
+       public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
+               // TODO - We need to work on the templates and how data will be set in
+               // the template
+               LOGGER.info("Fetching list of all api keys and returning in a templated form for display.");
+               Map<String, NsaSimpleApiKey> keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords();
+
+               LinkedList<JSONObject> keyList = new LinkedList<>();
+
+               JSONObject jsonList = new JSONObject();
+
+               for (Entry<String, NsaSimpleApiKey> e : keyMap.entrySet()) {
+                       final NsaSimpleApiKey key = e.getValue();
+                       final JSONObject jsonObject = new JSONObject();
+                       jsonObject.put("key", key.getKey());
+                       jsonObject.put("email", key.getContactEmail());
+                       jsonObject.put("description", key.getDescription());
+                       keyList.add(jsonObject);
+               }
+
+               jsonList.put("apiKeys", keyList);
+
+               LOGGER.info("Returning list of all the api keys in JSON format for the template.");
+               // "templates/apiKeyList.html"
+               DMaaPResponseBuilder.respondOk(dmaapContext, jsonList);
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param apiKey
+        * @throws ConfigDbException 
+        * @throws IOException 
+        * @throws JSONException 
+        * @throws Exception
+        */
+       @Override
+       public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws CambriaApiException, ConfigDbException, JSONException, IOException {
+               // TODO - We need to work on the templates and how data will be set in
+               // the template
+               LOGGER.info("Fetching detials of apikey: " + apiKey);
+               final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey);
+
+               if (null != key) {
+                       LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response");
+                       DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
+               } else {
+                       LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
+                       throw new CambriaApiException(400,"Key [" + apiKey + "] not found.");
+               }
+
+       }
+
+       /**
+        * Fetching list of all the topics
+        * @param dmaapContext
+        * @throws ConfigDbException
+        * @throws IOException
+        */
+       @Override
+       public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
+               // TODO - We need to work on the templates and how data will be set in
+               // the template
+               LOGGER.info("Fetching list of all the topics and returning in a templated form for display");
+               List<Topic> topicsList = getMetaBroker(dmaapContext).getAllTopics();
+
+               JSONObject jsonObject = new JSONObject();
+
+               JSONArray topicsArray = new JSONArray();
+
+               List<Topic> topicList = getMetaBroker(dmaapContext).getAllTopics();
+
+               for (Topic topic : topicList) {
+                       JSONObject obj = new JSONObject();
+                       obj.put("topicName", topic.getName());
+                       obj.put("description", topic.getDescription());
+                       obj.put("owner", topic.getOwner());
+                       topicsArray.put(obj);
+               }
+
+               jsonObject.put("topics", topicsList);
+
+               LOGGER.info("Returning the list of topics in templated format for display.");
+               DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject);
+
+       }
+
+       /**
+        * @param dmaapContext
+        * @param topicName
+        * @throws ConfigDbException
+        * @throws IOException
+        * @throws TopicExistsException
+        */
+       @Override
+       public void getTopic(DMaaPContext dmaapContext, String topicName)
+                       throws ConfigDbException, IOException, TopicExistsException {
+               // TODO - We need to work on the templates and how data will be set in
+               // the template
+               LOGGER.info("Fetching detials of apikey: " + topicName);
+               Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+               if (null == topic) {
+                       LOGGER.error("Topic [" + topicName + "] does not exist.");
+                       throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
+               }
+
+               JSONObject json = new JSONObject();
+               json.put("topicName", topic.getName());
+               json.put("description", topic.getDescription());
+               json.put("owner", topic.getOwner());
+
+               LOGGER.info("Returning details of topic [" + topicName + "]. Sending response.");
+               DMaaPResponseBuilder.respondOk(dmaapContext, json);
+
+       }
+
+       /**
+        * 
+        * @param dmaapContext
+        * @return
+        */
+       private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
+               return dmaapContext.getConfigReader().getfApiKeyDb();
+
+       }
+
+       /**
+        * 
+        * @param dmaapContext
+        * @return
+        */
+       private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
+               return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java
new file mode 100644 (file)
index 0000000..3a02252
--- /dev/null
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.transaction;
+/**
+ * 
+ * @author anowarul.islam
+ *
+ * @param <K>
+ */
+public interface DMaaPTransactionFactory<K extends DMaaPTransactionObj> {
+
+       /**
+        * 
+        * @param data
+        * @return
+        */
+       K makeNewTransactionObj ( String data );
+       /**
+        * 
+        * @param id
+        * @return
+        */
+       K makeNewTransactionId ( String id );
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java
new file mode 100644 (file)
index 0000000..5da30d1
--- /dev/null
@@ -0,0 +1,84 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.transaction;
+
+import org.json.JSONObject;
+
+/**
+ * This is an interface for DMaaP transactional logging object class.
+ * @author nilanjana.maity
+ *
+ */
+public interface DMaaPTransactionObj {
+       /**
+        * This will get the transaction id
+        * @return id transactionId
+        */
+       String getId();
+       /**
+        * This will set the transaction id
+        * @param id transactionId
+        */
+       void setId(String id);
+       /**
+        * This will sync the transaction object mapping
+        * @return String or null
+        */
+       String serialize();
+       /**
+        * get the total message count once the publisher published
+        * @return long totalMessageCount
+        */
+       long getTotalMessageCount();
+       /**
+        * set the total message count once the publisher published
+        * @param totalMessageCount
+        */
+       void setTotalMessageCount(long totalMessageCount);
+       /**
+        * get the total Success Message Count once the publisher published
+        * @return getSuccessMessageCount
+        */
+       long getSuccessMessageCount();
+       /**
+        * set the total Success Message Count once the publisher published
+        * @param successMessageCount
+        */
+       void setSuccessMessageCount(long successMessageCount);
+       /**
+        * get the failure Message Count once the publisher published
+        * @return failureMessageCount
+        */
+       long getFailureMessageCount();
+       /**
+        * set the failure Message Count once the publisher published
+        * @param failureMessageCount
+        */
+       void setFailureMessageCount(long failureMessageCount);
+
+       /**
+        * wrapping the data into json object
+        * @return JSONObject
+        */
+       JSONObject asJsonObject();
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java
new file mode 100644 (file)
index 0000000..6d672d6
--- /dev/null
@@ -0,0 +1,86 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.transaction;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaSecurityManagerException;
+
+import java.util.Set;
+
+
+/**
+ * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances
+ * of this DB must support concurrent access.
+ * @author nilanjana.maity
+ *
+ * @param <K> DMaaPTransactionObj
+ */
+public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
+
+
+       /**
+        * Create a new Transaction Object. If one exists, 
+        * @param id
+        * @return the new Transaction record
+        * @throws ConfigDbException 
+        */
+       K createTransactionObj (String id) throws KeyExistsException, ConfigDbException;
+
+
+       /**
+        * An exception to signal a Transaction object already exists 
+        * @author nilanjana.maity
+        *
+        */
+       public static class KeyExistsException extends NsaSecurityManagerException
+       {
+               /**
+                * If the key exists
+                * @param key
+                */
+               public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); }
+               private static final long serialVersionUID = 1L;
+       }
+
+       /**
+        * Save a Transaction Object record. This must be used after changing auxiliary data on the record.
+        * Note that the transaction must exist (via createTransactionObj). 
+        * @param transactionObj
+        * @throws ConfigDbException 
+        */
+       void saveTransactionObj ( K transactionObj ) throws ConfigDbException;
+       
+       /**
+        * Load an Transaction Object record based on the Transaction ID value
+        * @param transactionId
+        * @return a transaction record or null
+        * @throws ConfigDbException 
+        */
+       K loadTransactionObj ( String transactionId ) throws ConfigDbException;
+       
+       /**
+        * Load all Transaction objects.
+        * @return
+        * @throws ConfigDbException
+        */
+       Set<String> loadAllTransactionObjs () throws ConfigDbException;
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java
new file mode 100644 (file)
index 0000000..660acec
--- /dev/null
@@ -0,0 +1,202 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.transaction;
+
+import org.json.JSONObject;
+
+/**
+ * This is the class which will have the transaction enabled logging object
+ * details
+ * 
+ * @author nilanjana.maity
+ *
+ */
+public class TransactionObj implements DMaaPTransactionObj {
+
+       private String id;
+       private String createTime;
+       private long totalMessageCount;
+       private long successMessageCount;
+       private long failureMessageCount;
+       private JSONObject fData = new JSONObject();
+       private TrnRequest trnRequest;
+       private static final String kAuxData = "transaction";
+
+       /**
+        * Initializing constructor  
+        * put the json data for transaction enabled logging
+        * 
+        * @param data
+        */
+       public TransactionObj(JSONObject data) {
+               fData = data;
+
+               // check for required fields (these throw if not present)
+               getId();
+               getTotalMessageCount();
+               getSuccessMessageCount();
+               getFailureMessageCount();
+
+               // make sure we've got an aux data object
+               final JSONObject aux = fData.optJSONObject(kAuxData);
+               if (aux == null) {
+                       fData.put(kAuxData, new JSONObject());
+               }
+       }
+
+       /**
+        * this constructor will have the details of transaction id,
+        * totalMessageCount successMessageCount, failureMessageCount to get the
+        * transaction object
+        * 
+        * @param id
+        * @param totalMessageCount
+        * @param successMessageCount
+        * @param failureMessageCount
+        */
+       public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) {
+               this.id = id;
+               this.totalMessageCount = totalMessageCount;
+               this.successMessageCount = successMessageCount;
+               this.failureMessageCount = failureMessageCount;
+
+       }
+
+       /**
+        * The constructor passing only transaction id
+        * 
+        * @param id
+        */
+       public TransactionObj(String id) {
+               this.id = id;
+       }
+
+       /**
+        * Wrapping the data into json object
+        * 
+        * @return JSONObject
+        */
+       public JSONObject asJsonObject() {
+               final JSONObject full = new JSONObject(fData, JSONObject.getNames(fData));
+               return full;
+       }
+
+       /**
+        * To get the transaction id
+        */
+       public String getId() {
+               return id;
+       }
+
+       /**
+        * To set the transaction id
+        */
+       public void setId(String id) {
+               this.id = id;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public String getCreateTime() {
+               return createTime;
+       }
+
+       /**
+        * 
+        * @param createTime
+        */
+       public void setCreateTime(String createTime) {
+               this.createTime = createTime;
+       }
+
+       @Override
+       public String serialize() {
+               fData.put("transactionId", id);
+               fData.put("totalMessageCount", totalMessageCount);
+               fData.put("successMessageCount", successMessageCount);
+               fData.put("failureMessageCount", failureMessageCount);
+               return fData.toString();
+       }
+
+       public long getTotalMessageCount() {
+               return totalMessageCount;
+       }
+
+       public void setTotalMessageCount(long totalMessageCount) {
+               this.totalMessageCount = totalMessageCount;
+       }
+
+       public long getSuccessMessageCount() {
+               return successMessageCount;
+       }
+
+       public void setSuccessMessageCount(long successMessageCount) {
+               this.successMessageCount = successMessageCount;
+       }
+
+       public long getFailureMessageCount() {
+               return failureMessageCount;
+       }
+
+       /**
+        * @param failureMessageCount
+        */
+       public void setFailureMessageCount(long failureMessageCount) {
+               this.failureMessageCount = failureMessageCount;
+       }
+
+       /**
+        * 
+        * @return JSOnObject fData
+        */
+       public JSONObject getfData() {
+               return fData;
+       }
+
+       /**
+        * set the json object into data
+        * 
+        * @param fData
+        */
+       public void setfData(JSONObject fData) {
+               this.fData = fData;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public TrnRequest getTrnRequest() {
+               return trnRequest;
+       }
+
+       /**
+        * 
+        * @param trnRequest
+        */
+       public void setTrnRequest(TrnRequest trnRequest) {
+               this.trnRequest = trnRequest;
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java
new file mode 100644 (file)
index 0000000..138ebea
--- /dev/null
@@ -0,0 +1,183 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.transaction;
+
+/**
+ * Created for transaction enable logging details, this is nothing but a bean
+ * class.
+ * 
+ * @author nilanjana.maity
+ *
+ */
+public class TrnRequest {
+
+       private String id;
+       private String requestCreate;
+       private String requestHost;
+       private String serverHost;
+       private String messageProceed;
+       private String totalMessage;
+       private String clientType;
+       private String url;
+
+       /**
+        * 
+        * 
+        * 
+        * @return id
+        * 
+        */
+       public String getId() {
+               return id;
+       }
+
+       /**
+        * 
+        * 
+        * @param id
+        */
+       public void setId(String id) {
+               this.id = id;
+       }
+
+       /**
+        * 
+        * 
+        * @return requestCreate
+        */
+       public String getRequestCreate() {
+               return requestCreate;
+       }
+
+       /**
+        * 
+        * @param requestCreate
+        */
+       public void setRequestCreate(String requestCreate) {
+               this.requestCreate = requestCreate;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public String getRequestHost() {
+               return requestHost;
+       }
+
+       /**
+        * 
+        * @param requestHost
+        */
+       public void setRequestHost(String requestHost) {
+               this.requestHost = requestHost;
+       }
+
+       /**
+        * 
+        * 
+        * 
+        * @return
+        */
+       public String getServerHost() {
+               return serverHost;
+       }
+
+       /**
+        * 
+        * @param serverHost
+        */
+       public void setServerHost(String serverHost) {
+               this.serverHost = serverHost;
+       }
+
+       /**
+        * 
+        * 
+        * 
+        * @return
+        */
+       public String getMessageProceed() {
+               return messageProceed;
+       }
+
+       /**
+        * 
+        * @param messageProceed
+        */
+       public void setMessageProceed(String messageProceed) {
+               this.messageProceed = messageProceed;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public String getTotalMessage() {
+               return totalMessage;
+       }
+
+       /**
+        * 
+        * @param totalMessage
+        * 
+        * 
+        */
+       public void setTotalMessage(String totalMessage) {
+               this.totalMessage = totalMessage;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public String getClientType() {
+               return clientType;
+       }
+
+       /**
+        * 
+        * @param clientType
+        * 
+        */
+       public void setClientType(String clientType) {
+               this.clientType = clientType;
+       }
+
+       /**
+        * 
+        * @return
+        */
+       public String getUrl() {
+               return url;
+       }
+
+       /**
+        * 
+        * @param url
+        * 
+        */
+       public void setUrl(String url) {
+               this.url = url;
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java
new file mode 100644 (file)
index 0000000..268f0a4
--- /dev/null
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.transaction.impl;
+
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionFactory;
+import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObj;
+import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
+
+/**
+ * A factory for the simple Transaction implementation
+ * 
+ * 
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
+       /**
+        * 
+        * @param data
+        * @return DMaaPTransactionObj
+        */
+       @Override
+       public DMaaPTransactionObj makeNewTransactionObj(String data) {
+               JSONObject jsonObject = new JSONObject(data);
+               return new TransactionObj(jsonObject.getString("transactionId"), jsonObject.getLong("totalMessageCount"),
+                               jsonObject.getLong("successMessageCount"), jsonObject.getLong("failureMessageCount"));
+       }
+
+       /**
+        * 
+        * @param id
+        * @return TransactionObj
+        * 
+        * 
+        */
+       @Override
+       public DMaaPTransactionObj makeNewTransactionId(String id) {
+               return new TransactionObj(id);
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java
new file mode 100644 (file)
index 0000000..c9680c5
--- /dev/null
@@ -0,0 +1,493 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.utils;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.confimpl.MemConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.NsaAuthenticatorService;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+import org.I0Itec.zkclient.ZkClient;
+import org.apache.curator.framework.CuratorFramework;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.backends.Publisher;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueuePublisher;
+import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter;
+import org.onap.dmaap.dmf.mr.beans.DMaaPZkConfigDb;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.metabroker.Broker1;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import javax.servlet.ServletException;
+
+/**
+ * Class is created for all the configuration for rest and service layer
+ * integration.
+ *
+ */
+@Component
+public class ConfigurationReader {
+
+
+       private Broker1 fMetaBroker;
+       private ConsumerFactory fConsumerFactory;
+       private Publisher fPublisher;
+       private MetricsSet fMetrics;
+       @Autowired
+       private DMaaPCambriaLimiter fRateLimiter;
+       private NsaApiDb<NsaSimpleApiKey> fApiKeyDb;
+       
+       private DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager;
+       private NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager;
+       private static CuratorFramework curator;
+       private ZkClient zk;
+       private DMaaPZkConfigDb fConfigDb;
+       private MemoryQueue q;
+       private MemoryMetaBroker mmb;
+       private Blacklist fIpBlackList;
+       private Emailer fEmailer;
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+       
+
+       /**
+        * constructor to initialize all the values
+        * 
+        * @param settings
+        * @param fMetrics
+        * @param zk
+        * @param fConfigDb
+        * @param fPublisher
+        * @param curator
+        * @param fConsumerFactory
+        * @param fMetaBroker
+        * @param q
+        * @param mmb
+        * @param fApiKeyDb
+        * @param fSecurityManager
+        * @throws missingReqdSetting
+        * @throws invalidSettingValue
+        * @throws ServletException
+        * @throws KafkaConsumerCacheException
+        * @throws ConfigDbException 
+        * @throws KeyExistsException 
+        */
+       @Autowired
+       public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings,
+                       @Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk,
+                       @Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
+                       @Qualifier("curator") CuratorFramework curator,
+                       @Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
+                       @Qualifier("dMaaPKafkaMetaBroker") Broker1 fMetaBroker,
+                       @Qualifier("q") MemoryQueue q,
+                       @Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
+                       /*
+                        * @Qualifier("dMaaPTranDb")
+                        * DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb,
+                        */
+                       @Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager
+                       )
+                                       throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException, KeyExistsException {
+               
+               this.fMetrics = fMetrics;
+               this.zk = zk;
+               this.fConfigDb = fConfigDb;
+               this.fPublisher = fPublisher;
+               ConfigurationReader.curator = curator;
+               this.fConsumerFactory = fConsumerFactory;
+               this.fMetaBroker = fMetaBroker;
+               
+               this.q = q;
+               this.mmb = mmb;
+               this.fApiKeyDb = fApiKeyDb;
+               
+               this.fSecurityManager = fSecurityManager;
+               
+               long allowedtimeSkewMs=600000L;
+               String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs");
+               if(null!=strallowedTimeSkewM)
+               {
+                       allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM);
+               }       
+       
+               //String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel");
+               //if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel);
+               //this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true));
+               //this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel);
+               
+               servletSetup();
+       }
+
+       protected void servletSetup()
+                       throws missingReqdSetting, invalidSettingValue, ServletException, ConfigDbException, KeyExistsException {
+               try {
+
+                       fMetrics.toJson();
+                       fMetrics.setupCambriaSender();
+                       // add the admin authenticator
+                       
+                                               final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
+                                               
+                                               if ( adminSecret != null && adminSecret.length () > 0 )
+                                               {
+                                                               
+                                                               final NsaApiDb<NsaSimpleApiKey> adminDb = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( new MemConfigDb(), new NsaSimpleApiKeyFactory() );
+                                                               adminDb.createApiKey ( "admin", adminSecret );
+                                                       
+                                                       fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
+                                               
+                                               }
+                                       
+                       // setup a backend
+                       
+                        String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
+                       if (type==null){
+                               type = CambriaConstants.kBrokerType_Kafka;
+                       }
+                       if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
+                               log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
+                       } else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
+                               log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
+                               fPublisher = new MemoryQueuePublisher(q, mmb);
+                               //Ramkumar remove below
+                       
+                               fConsumerFactory = new MemoryConsumerFactory(q);
+                       } else {
+                               throw new IllegalArgumentException(
+                                               "Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
+                       }
+                       fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
+                       this.fEmailer = new Emailer();
+                       log.info("Broker Type is:" + type);
+
+               } catch (SecurityException e) {
+                       throw new ServletException(e);
+               }
+       }
+
+       /**
+        * method returns metaBroker
+        * 
+        * @return
+        */
+       public Broker1 getfMetaBroker() {
+               return fMetaBroker;
+       }
+
+       /**
+        * method to set the metaBroker
+        * 
+        * @param fMetaBroker
+        */
+       public void setfMetaBroker(Broker1 fMetaBroker) {
+               this.fMetaBroker = fMetaBroker;
+       }
+
+       /**
+        * method to get ConsumerFactory Object
+        * 
+        * @return
+        */
+       public ConsumerFactory getfConsumerFactory() {
+               return fConsumerFactory;
+       }
+
+       /**
+        * method to set the consumerfactory object
+        * 
+        * @param fConsumerFactory
+        */
+       public void setfConsumerFactory(ConsumerFactory fConsumerFactory) {
+               this.fConsumerFactory = fConsumerFactory;
+       }
+
+       /**
+        * method to get Publisher object
+        * 
+        * @return
+        */
+       public Publisher getfPublisher() {
+               return fPublisher;
+       }
+
+       /**
+        * method to set Publisher object
+        * 
+        * @param fPublisher
+        */
+       public void setfPublisher(Publisher fPublisher) {
+               this.fPublisher = fPublisher;
+       }
+
+       /**
+        * method to get MetricsSet Object
+        * 
+        * @return
+        */
+       public MetricsSet getfMetrics() {
+               return fMetrics;
+       }
+
+       /**
+        * method to set MetricsSet Object
+        * 
+        * @param fMetrics
+        */
+       public void setfMetrics(MetricsSet fMetrics) {
+               this.fMetrics = fMetrics;
+       }
+
+       /**
+        * method to get DMaaPCambriaLimiter object
+        * 
+        * @return
+        */
+       public DMaaPCambriaLimiter getfRateLimiter() {
+               return fRateLimiter;
+       }
+
+       /**
+        * method to set DMaaPCambriaLimiter object
+        * 
+        * @param fRateLimiter
+        */
+       public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) {
+               this.fRateLimiter = fRateLimiter;
+       }
+
+       /**
+        * Method to get DMaaPAuthenticator object
+        * 
+        * @return
+        */
+       public DMaaPAuthenticator<NsaSimpleApiKey> getfSecurityManager() {
+               return fSecurityManager;
+       }
+
+       /**
+        * method to set DMaaPAuthenticator object
+        * 
+        * @param fSecurityManager
+        */
+       public void setfSecurityManager(DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager) {
+               this.fSecurityManager = fSecurityManager;
+       }
+
+       /**
+        * method to get rrNvReadable object
+        * 
+        * @return
+        */
+       
+               
+       
+
+       /**
+        * method to set rrNvReadable object
+        * 
+        * @param settings
+        */
+       
+       
+       
+
+       /**
+        * method to get CuratorFramework object
+        * 
+        * @return
+        */
+       public static CuratorFramework getCurator() {
+               return curator;
+       }
+
+       /**
+        * method to set CuratorFramework object
+        * 
+        * @param curator
+        */
+       public static void setCurator(CuratorFramework curator) {
+               ConfigurationReader.curator = curator;
+       }
+
+       /**
+        * method to get ZkClient object
+        * 
+        * @return
+        */
+       public ZkClient getZk() {
+               return zk;
+       }
+
+       /**
+        * method to set ZkClient object
+        * 
+        * @param zk
+        */
+       public void setZk(ZkClient zk) {
+               this.zk = zk;
+       }
+
+       /**
+        * method to get DMaaPZkConfigDb object
+        * 
+        * @return
+        */
+       public DMaaPZkConfigDb getfConfigDb() {
+               return fConfigDb;
+       }
+
+       /**
+        * method to set DMaaPZkConfigDb object
+        * 
+        * @param fConfigDb
+        */
+       public void setfConfigDb(DMaaPZkConfigDb fConfigDb) {
+               this.fConfigDb = fConfigDb;
+       }
+
+       /**
+        * method to get MemoryQueue object
+        * 
+        * @return
+        */
+       public MemoryQueue getQ() {
+               return q;
+       }
+
+       /**
+        * method to set MemoryQueue object
+        * 
+        * @param q
+        */
+       public void setQ(MemoryQueue q) {
+               this.q = q;
+       }
+
+       /**
+        * method to get MemoryMetaBroker object
+        * 
+        * @return
+        */
+       public MemoryMetaBroker getMmb() {
+               return mmb;
+       }
+
+       /**
+        * method to set MemoryMetaBroker object
+        * 
+        * @param mmb
+        */
+       public void setMmb(MemoryMetaBroker mmb) {
+               this.mmb = mmb;
+       }
+
+       /**
+        * method to get NsaApiDb object
+        * 
+        * @return
+        */
+       public NsaApiDb<NsaSimpleApiKey> getfApiKeyDb() {
+               return fApiKeyDb;
+       }
+
+       /**
+        * method to set NsaApiDb object
+        * 
+        * @param fApiKeyDb
+        */
+       public void setfApiKeyDb(NsaApiDb<NsaSimpleApiKey> fApiKeyDb) {
+               this.fApiKeyDb = fApiKeyDb;
+       }
+
+       /*
+        * public DMaaPTransactionObjDB<DMaaPTransactionObj> getfTranDb() { return
+        * fTranDb; }
+        * 
+        * public void setfTranDb(DMaaPTransactionObjDB<DMaaPTransactionObj>
+        * fTranDb) { this.fTranDb = fTranDb; }
+        */
+       /**
+        * method to get the zookeeper connection String
+        * 
+        * @param settings
+        * @return
+        */
+       public static String getMainZookeeperConnectionString() {
+               
+               
+                String zkServername = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+                if (zkServername==null){
+                        zkServername=CambriaConstants.kDefault_ZkConfigDbServers;
+                }
+                return zkServername;
+       }
+
+       public static String getMainZookeeperConnectionSRoot(){
+               String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot);
+       
+               if (null==strVal)
+                       strVal=CambriaConstants.kDefault_ZkConfigDbRoot;
+       
+               return strVal;
+       }
+       
+       public Blacklist getfIpBlackList() {
+               return fIpBlackList;
+       }
+
+       public void setfIpBlackList(Blacklist fIpBlackList) {
+               this.fIpBlackList = fIpBlackList;
+       }
+
+       public NsaAuthenticatorService<NsaSimpleApiKey> getNsaSecurityManager() {
+               return nsaSecurityManager;
+       }
+
+       public void setNsaSecurityManager(NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager) {
+               this.nsaSecurityManager = nsaSecurityManager;
+       }
+       
+       public Emailer getSystemEmailer()
+         {
+           return this.fEmailer;
+         }
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java
new file mode 100644 (file)
index 0000000..261331f
--- /dev/null
@@ -0,0 +1,68 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.utils;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+
+/**
+ * 
+ * 
+ * @author anowarul.islam
+ *
+ *
+ */
+public class DMaaPCuratorFactory {
+       /**
+        * 
+        * method provide CuratorFramework object
+        * 
+        * @param settings
+        * @return
+        * 
+        * 
+        * 
+        */
+       public static CuratorFramework getCurator(rrNvReadable settings) {
+               String Setting_ZkConfigDbServers =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkConfigDbServers);
+                
+               if(null==Setting_ZkConfigDbServers)
+                        Setting_ZkConfigDbServers =CambriaConstants.kDefault_ZkConfigDbServers; 
+               
+               String strSetting_ZkSessionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs);
+               if (strSetting_ZkSessionTimeoutMs==null) strSetting_ZkSessionTimeoutMs = CambriaConstants.kDefault_ZkSessionTimeoutMs+"";
+               int Setting_ZkSessionTimeoutMs = Integer.parseInt(strSetting_ZkSessionTimeoutMs);
+               
+               String str_ZkConnectionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs);
+               if (str_ZkConnectionTimeoutMs==null) str_ZkConnectionTimeoutMs = CambriaConstants.kDefault_ZkConnectionTimeoutMs+"";
+               int setting_ZkConnectionTimeoutMs = Integer.parseInt(str_ZkConnectionTimeoutMs);
+               
+               
+               CuratorFramework curator = CuratorFrameworkFactory.newClient(
+                               Setting_ZkConfigDbServers,Setting_ZkSessionTimeoutMs,setting_ZkConnectionTimeoutMs
+                               ,new ExponentialBackoffRetry(1000, 5));
+               return curator;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java
new file mode 100644 (file)
index 0000000..3ca60b0
--- /dev/null
@@ -0,0 +1,363 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.utils;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.*;
+
+/**
+ * class is used to create response object which is given to user
+ * 
+ * @author nilanjana.maity
+ *
+ */
+
+public class DMaaPResponseBuilder {
+
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class);
+       protected static final int kBufferLength = 4096;
+
+       public static void setNoCacheHeadings(DMaaPContext ctx) {
+               HttpServletResponse response = ctx.getResponse();
+               response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate");
+               response.addHeader("Pragma", "no-cache");
+               response.addHeader("Expires", "0");
+       }
+
+       /**
+        * static method is used to create response object associated with
+        * JSONObject
+        * 
+        * @param ctx
+        * @param result
+        * @throws JSONException
+        * @throws IOException
+        */
+       public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException {
+
+               respondOkWithStream(ctx, "application/json", new ByteArrayInputStream(result.toString(4).getBytes()));
+
+       }
+
+       /**
+        * method used to set staus to 204
+        * 
+        * @param ctx
+        */
+       public static void respondOkNoContent(DMaaPContext ctx) {
+               try {
+                       ctx.getResponse().setStatus(204);
+               } catch (Exception excp) {
+                       log.error(excp.getMessage(), excp);
+               }
+       }
+
+       /**
+        * static method is used to create response object associated with html
+        * 
+        * @param ctx
+        * @param html
+        */
+       public static void respondOkWithHtml(DMaaPContext ctx, String html) {
+               try {
+                       respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.toString().getBytes()));
+               } catch (Exception excp) {
+                       log.error(excp.getMessage(), excp);
+               }
+       }
+
+       /**
+        * method used to create response object associated with InputStream
+        * 
+        * @param ctx
+        * @param mediaType
+        * @param is
+        * @throws IOException
+        */
+       public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is)
+                       throws IOException {
+               /*
+                * creates response object associated with streamwriter
+                */
+               respondOkWithStream(ctx, mediaType, new StreamWriter() {
+
+                       public void write(OutputStream os) throws IOException {
+                               copyStream(is, os);
+                       }
+               });
+
+       }
+
+       /**
+        * 
+        * @param ctx
+        * @param mediaType
+        * @param writer
+        * @throws IOException
+        */
+       public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
+               ctx.getResponse().setStatus(200);
+               try(OutputStream os = getStreamForBinaryResponse(ctx, mediaType)) {
+                       writer.write(os);
+               }
+
+               
+       }
+
+       /**
+        * static method to create error objects
+        * 
+        * @param ctx
+        * @param errCode
+        * @param msg
+        */
+       public static void respondWithError(DMaaPContext ctx, int errCode, String msg) {
+               try {
+                       ctx.getResponse().sendError(errCode, msg);
+               } catch (IOException excp) {
+                       log.error(excp.getMessage(), excp);
+               }
+       }
+
+       /**
+        * method to create error objects
+        * 
+        * @param ctx
+        * @param errCode
+        * @param body
+        */
+       public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) {
+               try {
+                       sendErrorAndBody(ctx, errCode, body.toString(4), "application/json");
+               } catch (Exception excp) {
+                       log.error(excp.getMessage(), excp);
+               }
+       }
+
+       /**
+        * static method creates error object in JSON
+        * 
+        * @param ctx
+        * @param errCode
+        * @param msg
+        */
+       public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) {
+               try {
+                       JSONObject o = new JSONObject();
+                       o.put("status", errCode);
+                       o.put("message", msg);
+                       respondWithError(ctx, errCode, o);
+
+               } catch (Exception excp) {
+                       log.error(excp.getMessage(), excp);
+               }
+       }
+
+       /**
+        * static method used to copy the stream with the help of another method
+        * copystream
+        * 
+        * @param in
+        * @param out
+        * @throws IOException
+        */
+       public static void copyStream(InputStream in, OutputStream out) throws IOException {
+               copyStream(in, out, 4096);
+       }
+
+       /**
+        * static method to copy the streams
+        * 
+        * @param in
+        * @param out
+        * @param bufferSize
+        * @throws IOException
+        */
+       public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException {
+               byte[] buffer = new byte[bufferSize];
+               int len;
+               while ((len = in.read(buffer)) != -1) {
+                       out.write(buffer, 0, len);
+               }
+               out.close();
+       }
+
+       /**
+        * interface used to define write method for outputStream
+        */
+       public abstract static interface StreamWriter {
+               /**
+                * abstract method used to write the response
+                * 
+                * @param paramOutputStream
+                * @throws IOException
+                */
+               public abstract void write(OutputStream paramOutputStream) throws IOException;
+       }
+
+       /**
+        * static method returns stream for binary response
+        * 
+        * @param ctx
+        * @return
+        * @throws IOException
+        */
+       public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException {
+               return getStreamForBinaryResponse(ctx, "application/octet-stream");
+       }
+
+       /**
+        * static method returns stream for binaryResponses
+        * 
+        * @param ctx
+        * @param contentType
+        * @return
+        * @throws IOException
+        */
+       public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
+               ctx.getResponse().setContentType(contentType);
+               
+
+               boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
+               
+               if (fResponseEntityAllowed) {
+                       try(OutputStream os = ctx.getResponse().getOutputStream()){
+                               return os;
+                       }catch (Exception e){
+                               log.error("Exception in getStreamForBinaryResponse",e);
+                               throw new IOException();
+                       }
+               } else {
+                       try(OutputStream os = new NullStream()){
+                               return os;
+                       }catch (Exception e){
+                               log.error("Exception in getStreamForBinaryResponse",e);
+                               throw new IOException();
+                       }
+               }
+       }
+
+       /**
+        * 
+        * @author anowarul.islam
+        *
+        */
+       private static class NullStream extends OutputStream {
+               /**
+                * @param b
+                *            integer
+                */
+               public void write(int b) {
+               }
+       }
+
+       private static class NullWriter extends Writer {
+               /**
+                * write method
+                * @param cbuf
+                * @param off
+                * @param len
+                */
+               public void write(char[] cbuf, int off, int len) {
+               }
+
+               /**
+                * flush method
+                */
+               public void flush() {
+               }
+
+               /**
+                * close method
+                */
+               public void close() {
+               }
+       }
+
+       /**
+        * sttaic method fetch stream for text
+        * 
+        * @param ctx
+        * @param err
+        * @param content
+        * @param mimeType
+        */
+       public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) {
+               try {
+                       setStatus(ctx, err);
+                       getStreamForTextResponse(ctx, mimeType).println(content);
+               } catch (IOException e) {
+                       log.error(new StringBuilder().append("Error sending error response: ").append(e.getMessage()).toString(),
+                                       e);
+               }
+       }
+
+       /**
+        * method to set the code
+        * 
+        * @param ctx
+        * @param code
+        */
+       public static void setStatus(DMaaPContext ctx, int code) {
+               ctx.getResponse().setStatus(code);
+       }
+
+       /**
+        * static method returns stream for text response
+        * 
+        * @param ctx
+        * @return
+        * @throws IOException
+        */
+       public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException {
+               return getStreamForTextResponse(ctx, "text/html");
+       }
+
+       /**
+        * static method returns stream for text response
+        * 
+        * @param ctx
+        * @param contentType
+        * @return
+        * @throws IOException
+        */
+       public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException {
+               ctx.getResponse().setContentType(contentType);
+
+               PrintWriter pw = null;
+               boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
+
+               if (fResponseEntityAllowed) {
+                       pw = ctx.getResponse().getWriter();
+               } else {
+                       pw = new PrintWriter(new NullWriter());
+               }
+               return pw;
+       }
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java
new file mode 100644 (file)
index 0000000..6cf9b7e
--- /dev/null
@@ -0,0 +1,203 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.utils;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+
+import javax.mail.*;
+import javax.mail.internet.InternetAddress;
+import javax.mail.internet.MimeBodyPart;
+import javax.mail.internet.MimeMessage;
+import javax.mail.internet.MimeMultipart;
+import java.io.IOException;
+import java.util.Properties;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+/**
+ * Send an email from a message.
+ * 
+ * @author peter
+ */
+public class Emailer
+{
+       public static final String kField_To = "to";
+       public static final String kField_Subject = "subject";
+       public static final String kField_Message = "message";
+
+       public Emailer()
+       {
+               fExec = Executors.newCachedThreadPool ();
+       
+       }
+
+       public void send ( String to, String subj, String body ) throws IOException
+       {
+               final String[] addrs = to.split ( "," );
+
+               if ( to.length () > 0 )
+               {
+                       final MailTask mt = new MailTask ( addrs, subj, body );
+                       fExec.submit ( mt );
+               }
+               else
+               {
+                       log.warn ( "At least one address is required." );
+               }
+       }
+
+       public void close ()
+       {
+               fExec.shutdown ();
+       }
+
+       private final ExecutorService fExec;
+       
+
+       
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class);
+       
+       public static final String kSetting_MailAuthUser = "mailLogin";
+       public static final String kSetting_MailFromEmail = "mailFromEmail";
+       public static final String kSetting_MailFromName = "mailFromName";
+       public static final String kSetting_SmtpServer = "mailSmtpServer";
+       public static final String kSetting_SmtpServerPort = "mailSmtpServerPort";
+       public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl";
+       public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth";
+
+       private class MailTask implements Runnable
+       {
+               public MailTask ( String[] to, String subject, String msgBody )
+               {
+                       fToAddrs = to;
+                       fSubject = subject;
+                       fBody = msgBody;
+               }
+
+               private String getSetting ( String settingKey, String defval )
+               {
+                       
+                       String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey);
+                       if(strSet==null)strSet=defval;
+                       return strSet;
+               }
+
+               // we need to get setting values from the evaluator but also the channel config
+               private void makeSetting ( Properties props, String propKey, String settingKey, String defval )
+               {
+                       props.put ( propKey, getSetting ( settingKey, defval ) );
+               }
+
+               private void makeSetting ( Properties props, String propKey, String settingKey, int defval )
+               {
+                       makeSetting ( props, propKey, settingKey, "" + defval );
+               }
+
+               private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval )
+               {
+                       makeSetting ( props, propKey, settingKey, "" + defval );
+               }
+
+               @Override
+               public void run ()
+               {
+                       final StringBuffer tag = new StringBuffer ();
+                       final StringBuffer addrList = new StringBuffer ();
+                       tag.append ( "(" );
+                       for ( String to : fToAddrs )
+                       {
+                               if ( addrList.length () > 0 )
+                               {
+                                       addrList.append ( ", " );
+                               }
+                               addrList.append ( to );
+                       }
+                       tag.append ( addrList.toString () );
+                       tag.append ( ") \"" );
+                       tag.append ( fSubject );
+                       tag.append ( "\"" );
+                       
+                       log.info ( "sending mail to " + tag );
+
+                       try
+                       {
+                               final Properties prop = new Properties ();
+                               makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 );
+                               prop.put ( "mail.smtp.socketFactory.fallback", "false" );
+                               prop.put ( "mail.smtp.quitwait", "false" );
+                               makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.onap.com" );
+                               makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true );
+                               makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true );
+
+                               final String un = getSetting ( kSetting_MailAuthUser, "" );
+                               final String value=(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword")!=null)?AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword"):"";
+                               final Session session = Session.getInstance ( prop,
+                                       new javax.mail.Authenticator()
+                                       {
+                                               @Override
+                                               protected PasswordAuthentication getPasswordAuthentication()
+                                               {
+                                                       return new PasswordAuthentication ( un, value );
+                                               }
+                                       }
+                               );
+                               
+                               final Message msg = new MimeMessage ( session );
+
+                               final InternetAddress from = new InternetAddress (
+                                       getSetting ( kSetting_MailFromEmail, "team@dmaap.mr.onap.com" ),
+                                       getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) );
+                               msg.setFrom ( from );
+                               msg.setReplyTo ( new InternetAddress[] { from } );
+                               msg.setSubject ( fSubject );
+
+                               for ( String toAddr : fToAddrs )
+                               {
+                                       final InternetAddress to = new InternetAddress ( toAddr );
+                                       msg.addRecipient ( Message.RecipientType.TO, to );
+                               }
+
+                               final Multipart multipart = new MimeMultipart ( "related" );
+                               final BodyPart htmlPart = new MimeBodyPart ();
+                               htmlPart.setContent ( fBody, "text/plain" );
+                               multipart.addBodyPart ( htmlPart );
+                               msg.setContent ( multipart );
+
+                               Transport.send ( msg );
+
+                               log.info ( "mailing " + tag + " off without error" );
+                       }
+                       catch ( Exception e )
+                       {
+                               log.warn ( "Exception caught for " + tag, e );
+                       }
+               }
+
+               private final String[] fToAddrs;
+               private final String fSubject;
+               private final String fBody;
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java
new file mode 100644 (file)
index 0000000..51c3f6c
--- /dev/null
@@ -0,0 +1,125 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.utils;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
+
+import java.util.Map;
+
+/**
+ * 
+ * @author nilesh.labde
+ *
+ *
+ */
+public class PropertyReader extends nvReadableStack {
+       /**
+        * 
+        * initializing logger
+        * 
+        */
+       
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class);
+
+
+       /**
+        * constructor initialization
+        * 
+        * @throws loadException
+        * 
+        */
+       public PropertyReader() throws loadException {
+       
+               
+               
+       
+               
+       }
+
+       /**
+        * 
+        * 
+        * @param argMap
+        * @param key
+        * @param defaultValue
+        * @return
+        * 
+        */
+       @SuppressWarnings("unused")
+       private static String getSetting(Map<String, String> argMap, final String key, final String defaultValue) {
+               String val = (String) argMap.get(key);
+               if (null == val) {
+                       return defaultValue;
+               }
+               return val;
+       }
+
+       /**
+        * 
+        * @param resourceName
+        * @param clazz
+        * @return
+        * @exception MalformedURLException
+        * 
+        */
+       
+               
+               
+
+                       
+                       
+               
+
+               
+
+                       
+
+                               
+
+                               
+
+                               
+                       
+                                       
+                       
+               
+
+                       
+
+                       
+                               
+               
+
+                       
+
+                       
+                               
+               
+               
+                       
+       
+               
+       
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java
new file mode 100644 (file)
index 0000000..662f0f7
--- /dev/null
@@ -0,0 +1,182 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.utils;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.Principal;
+import java.text.DecimalFormat;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+/**
+ * This is an utility class for various operations for formatting
+ * @author nilanjana.maity
+ *
+ */
+public class Utils {
+
+       private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
+       public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth";
+       private static final String AUTH_HEADER = "Authorization";
+       private static final String BATCH_ID_FORMAT = "000000";
+       private static final String X509_ATTR = "javax.servlet.request.X509Certificate";
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(Utils.class);
+
+       private Utils() {
+               super();
+       }
+
+       /**
+        * Formatting the date 
+        * @param date
+        * @return date or null
+        */
+       public static String getFormattedDate(Date date) {
+               SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
+               if (null != date){
+                       return sdf.format(date);
+               }
+               return null;
+       }
+       /**
+        * to get the details of User Api Key
+        * @param request
+        * @return authkey or null
+        */
+       public static String getUserApiKey(HttpServletRequest request) {
+               final String auth = request.getHeader(CAMBRIA_AUTH_HEADER);
+               if (null != auth) {
+                       final String[] splittedAuthKey = auth.split(":");
+                       return splittedAuthKey[0];
+               }else if (null != request.getHeader(AUTH_HEADER) || null != request.getAttribute(X509_ATTR)){
+                       /**
+                        * AAF implementation enhancement
+                        */
+                       Principal principal = request.getUserPrincipal();
+                       if(principal != null){
+                               String name = principal.getName();
+                               return name.substring(0, name.lastIndexOf('@'));
+                       }
+                       log.warn("No principal has been provided on HTTP request");
+               }
+               return null;
+       }
+
+
+       /**
+        * to format the batch sequence id
+        * @param batchId
+        * @return batchId
+        */
+       public static String getFromattedBatchSequenceId(Long batchId) {
+               DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT);
+               return format.format(batchId);
+       }
+
+       /**
+        * to get the message length in bytes
+        * @param message
+        * @return bytes or 0
+        */
+       public static long messageLengthInBytes(String message) {
+               if (null != message) {
+                       return message.getBytes().length;
+               }
+               return 0;
+       }
+       /**
+        * To get transaction id details
+        * @param transactionId
+        * @return transactionId or null
+        */
+       public static String getResponseTransactionId(String transactionId) {
+               if (null != transactionId && !transactionId.isEmpty()) {
+                       return transactionId.substring(0, transactionId.lastIndexOf("::"));
+               }
+               return null;
+       }
+
+       /**
+        * get the thread sleep time
+        * @param ratePerMinute
+        * @return ratePerMinute or 0
+        */
+       public static long getSleepMsForRate ( double ratePerMinute )
+       {
+               if ( ratePerMinute <= 0.0 ) 
+               {
+                       return 0;
+               }
+               return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) );
+       }
+
+         public static String getRemoteAddress(DMaaPContext ctx)
+         {
+           String reqAddr = ctx.getRequest().getRemoteAddr();
+           String fwdHeader = getFirstHeader("X-Forwarded-For",ctx);
+           return ((fwdHeader != null) ? fwdHeader : reqAddr);
+         }
+         public static String getFirstHeader(String h,DMaaPContext ctx)
+         {
+           List l = getHeader(h,ctx);
+           return ((l.size() > 0) ? (String)l.iterator().next() : null);
+         }
+         public static List<String> getHeader(String h,DMaaPContext ctx)
+         {
+           LinkedList list = new LinkedList();
+           Enumeration e = ctx.getRequest().getHeaders(h);
+           while (e.hasMoreElements())
+           {
+             list.add(e.nextElement().toString());
+           }
+           return list;
+         }
+         
+         public static String getKafkaproperty(){
+                 InputStream input = new Utils().getClass().getResourceAsStream("/kafka.properties");
+                       Properties props = new Properties();
+                       try {
+                               props.load(input);
+                       } catch (IOException e) {
+                               log.error("failed to read kafka.properties");
+                       }
+                       return props.getProperty("key");
+                       
+                 
+         }
+         
+         public static boolean isCadiEnabled(){
+                 boolean enableCadi=false;
+                 if(System.getenv("enableCadi")!=null&&System.getenv("enableCadi").equals("true")){
+                         enableCadi=true;
+                       }
+                 
+                 return enableCadi;
+         }
+                 
+}
diff --git a/src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java
new file mode 100644 (file)
index 0000000..7b19da1
--- /dev/null
@@ -0,0 +1,194 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.apiServer.metrics.cambria;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.apiServer.metrics.cambria.MetricsSender;
+import com.att.nsa.metrics.CdmMetricsRegistry;
+import com.att.nsa.metrics.impl.CdmConstant;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisher;
+import org.onap.dmaap.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * MetricsSender will send the given metrics registry content as an event on the
+ * Cambria event broker to the given topic.
+ * 
+ * @author peter
+ *
+ */
+public class DMaaPMetricsSender implements Runnable {
+       public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled";
+       public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl";
+       public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic";
+       public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds";
+
+       /**
+        * Schedule a periodic send of the given metrics registry using the given
+        * settings container for the Cambria location, topic, and send frequency.
+        * <br/>
+        * <br/>
+        * If the enabled flag is false, this method returns null.
+        * 
+        * @param scheduler
+        * @param metrics
+        * @param settings
+        * @param defaultTopic
+        * @return a handle to the scheduled task
+        */
+       public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
+                        String defaultTopic) {
+               log.info("Inside : DMaaPMetricsSender : sendPeriodically");
+       String  cambriaSetting= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
+       boolean setEnable=true;
+       if (cambriaSetting!=null && cambriaSetting.equals("false") )
+       setEnable= false;
+
+               if (setEnable) {
+                       String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
+                       
+                       Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl;
+                       
+                       String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic);
+                       if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";     
+                       
+       
+                       
+                       String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs);
+                       
+                       int _CambriaSendFreqSecs =30;
+                       if(Setting_CambriaSendFreqSecs!=null){
+                                _CambriaSendFreqSecs = Integer.parseInt(Setting_CambriaSendFreqSecs);
+                       }
+                       
+
+                       return sendPeriodically(scheduler, metrics,
+                                       Setting_CambriaBaseUrl,Setting_CambriaTopic,_CambriaSendFreqSecs
+                               );
+                       /*return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
+                                       settings.getString(kSetting_CambriaBaseUrl, "localhost"),
+                                       settings.getString(kSetting_CambriaTopic, defaultTopic),
+                                       settings.getInt(kSetting_CambriaSendFreqSecs, 30));*/
+               } else {
+                       return null;
+               }
+       }
+
+       /**
+        * Schedule a periodic send of the metrics registry to the given Cambria
+        * broker and topic.
+        * 
+        * @param scheduler
+        * @param metrics
+        *            the registry to send
+        * @param cambriaBaseUrl
+        *            the base URL for Cambria
+        * @param topic
+        *            the topic to publish on
+        * @param everySeconds
+        *            how frequently to publish
+        * @return a handle to the scheduled task
+        */
+       public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
+                       String cambriaBaseUrl, String topic, int everySeconds) {
+               return scheduler.scheduleAtFixedRate(new org.onap.dmaap.mr.apiServer.metrics.cambria.DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds,
+                               everySeconds, TimeUnit.SECONDS);
+       }
+
+       /**
+        * Create a metrics sender.
+        * 
+        * @param metrics
+        * @param cambriaBaseUrl
+        * @param topic
+        */
+       public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) {
+               try {
+                       fMetrics = metrics;
+                       fHostname = InetAddress.getLocalHost().getHostName();
+
+                       // setup a "simple" publisher that will send metrics immediately
+                       fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
+               } catch (UnknownHostException e) {
+                       log.warn("Unable to get localhost address in MetricsSender constructor.", e);
+                       throw new RuntimeException(e);
+               }
+       }
+
+       /**
+        * Send on demand.
+        */
+       public void send() {
+               try {
+                       final JSONObject o = fMetrics.toJson();
+                       o.put("hostname", fHostname);
+                       o.put("now", System.currentTimeMillis());
+                       o.put("metricsSendTime", addTimeStamp());
+                       o.put("transactionEnabled", false);
+                       fCambria.send(fHostname, o.toString());
+               } catch (JSONException e) {
+                       log.warn("Error posting metrics to Cambria: " + e.getMessage());
+               } catch (IOException e) {
+                       log.warn("Error posting metrics to Cambria: " + e.getMessage());
+               }
+       }
+
+       /**
+        * Run() calls send(). It's meant for use in a background-scheduled task.
+        */
+       @Override
+       public void run() {
+               send();
+       }
+
+       private final CdmMetricsRegistry fMetrics;
+       private final CambriaPublisher fCambria;
+       private final String fHostname;
+
+       
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class);
+       /**
+        * method creates and returnd CdmConstant object using current timestamp
+        * 
+        * @return
+        */
+       public CdmConstant addTimeStamp() {
+               // Add the timestamp with every metrics send
+               final long metricsSendTime = System.currentTimeMillis();
+               final Date d = new Date(metricsSendTime);
+               final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
+               return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text);
+       }
+}
diff --git a/src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java b/src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java
new file mode 100644 (file)
index 0000000..914efb9
--- /dev/null
@@ -0,0 +1,127 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.filter;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+import org.onap.dmaap.mr.filter.DefaultLength;
+import org.springframework.context.ApplicationContext;
+import org.springframework.web.context.support.WebApplicationContextUtils;
+
+import javax.servlet.*;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+
+/**
+ * Servlet Filter implementation class ContentLengthFilter
+ */
+public class ContentLengthFilter implements Filter {
+
+       private DefaultLength defaultLength;
+
+       private FilterConfig filterConfig = null;
+       DMaaPErrorMessages errorMessages = null;
+
+       private static final EELFLogger log = EELFManager.getInstance().getLogger(org.onap.dmaap.mr.filter.ContentLengthFilter.class);
+       /**
+        * Default constructor.
+        */
+
+       public ContentLengthFilter() {
+               // TODO Auto-generated constructor stub
+       }
+
+       /**
+        * @see Filter#destroy()
+        */
+       public void destroy() {
+               // TODO Auto-generated method stub
+       }
+
+       /**
+        * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain)
+        */
+       public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException,
+                       ServletException {
+               log.info("inside servlet do filter content length checking before pub/sub");
+               HttpServletRequest request = (HttpServletRequest) req;
+               JSONObject jsonObj = null;
+               int requestLength = 0;
+               try {
+                       // retrieving content length from message header
+
+                       if (null != request.getHeader("Content-Length")) {
+                               requestLength = Integer.parseInt(request.getHeader("Content-Length"));
+                       }
+                       // retrieving encoding from message header
+                       String transferEncoding = request.getHeader("Transfer-Encoding");
+                       // checking for no encoding, chunked and requestLength greater then
+                       // default length
+                       if (null != transferEncoding && !(transferEncoding.contains("chunked"))
+                                       && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
+                               jsonObj = new JSONObject().append("defaultlength", defaultLength)
+                                               .append("requestlength", requestLength);
+                               log.error("message length is greater than default");
+                               throw new CambriaApiException(jsonObj);
+                       } else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
+                               jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append(
+                                               "requestlength", requestLength);
+                               log.error("Request message is not chunked or request length is greater than default length");
+                               throw new CambriaApiException(jsonObj);
+                       } else {
+                               chain.doFilter(req, res);
+                       }
+               } catch (CambriaApiException | NumberFormatException e) {
+                       log.error("message size is greater then default", e);
+            if (jsonObj != null) {
+                ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
+                        DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(),
+                        errorMessages.getMsgSizeExceeds()
+                                + jsonObj.toString());
+                log.info(errRes.toString());
+            }
+               }
+
+       }
+
+       /**
+        * @see Filter#init(FilterConfig)
+        */
+       public void init(FilterConfig fConfig) throws ServletException {
+               this.filterConfig = fConfig;
+               log.info("Filter Content Length Initialize");
+               ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig
+                               .getServletContext());
+               DefaultLength defLength = (DefaultLength) ctx.getBean("defLength");
+               DMaaPErrorMessages errMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages");
+               this.errorMessages = errMessages;
+               this.defaultLength = defLength;
+
+       }
+
+}
diff --git a/src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java b/src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java
new file mode 100644 (file)
index 0000000..3425823
--- /dev/null
@@ -0,0 +1,37 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.filter;
+
+
+public class DefaultLength {
+       
+       String defLength;
+
+       public String getDefaultLength() {
+               return defLength;
+       }
+
+       public void setDefaultLength(String defaultLength) {
+               this.defLength = defaultLength;
+       }
+
+}
diff --git a/src/main/resources/DMaaPUrl.properties b/src/main/resources/DMaaPUrl.properties
new file mode 100644 (file)
index 0000000..2f4dee2
--- /dev/null
@@ -0,0 +1,39 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+#dev1 server
+#url=http://hltd435.hydc.sbc.com:8080/DMaaP/dmaaprest
+
+#dev2 server
+#url=http://hltd436.hydc.sbc.com:8080/DMaaP/dmaaprest/
+#url=http://hltd436.hydc.sbc.com:8181/DMaaP/dmaaprest/
+
+#localhost
+#url=http://hltd435.hydc.sbc.com:8089/DMaaP/dmaaprest/
+url=http://hltd436.hydc.sbc.com:8080/DMaaP/dmaaprest/
+#cloud
+#url=http://127.0.0.1:8090/DMaaP/dmaaprest/
+
+#X-Cambria-Date
+date=2015-11-23T8:56:19-0700
+
+# topic
+topicName=org.onap.dmaap.mr.testtopic
\ No newline at end of file
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
new file mode 100644 (file)
index 0000000..af4caca
--- /dev/null
@@ -0,0 +1,8 @@
+#Metrics related configurations
+management.endpoint.metrics.enabled:true
+management.endpoints.web.exposure.include:*
+management.endpoint.prometheus.enabled:true
+management.metrics.export.prometheus.enabled:true
+management.metrics.enable.jvm=true
+management.metrics.distribution.percentiles-histogram.http.server.requests=true
+management.metrics.distribution.sla.http.server.requests=100ms,300ms,500ms
\ No newline at end of file
diff --git a/src/main/resources/cambriaApiVersion.properties b/src/main/resources/cambriaApiVersion.properties
new file mode 100644 (file)
index 0000000..4521526
--- /dev/null
@@ -0,0 +1,23 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+
+cambriaApiVersion=${project.version}
diff --git a/src/main/resources/dme2testcase.properties b/src/main/resources/dme2testcase.properties
new file mode 100644 (file)
index 0000000..a1b09f6
--- /dev/null
@@ -0,0 +1,85 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+Version =1.0
+Environment =TEST
+Latitude =37.66
+Longitude =-122.096839
+ServiceName =org.onap.dmaap.mr/apiKeys
+Partner =MR1
+
+#producerConsumer
+SubContextPathproducer =/events/
+SubContextPathConsumer=/events/
+group=group
+id=user@me.dmaap.onap.com
+
+#filter
+SubContextPathConsumerFilter=/events/
+filterType=filter={"class":"Equals", "field":"email", "value":"test@onap.com"}
+
+#topics
+subContextPathGetAllTopic=/topics
+subContextPathGetOneTopic=/topics/
+SubContextPathGetPublisher=/topics/test/producers
+SubContextPathGetPermitPublisher=/topics/test/producers/test@onap.com
+SubContextPathGetConsumer=/topics/test/consumers
+SubContextPathCreateTopic=/topics/create
+SubContextPathGetPermitConsumer=/topics/test/consumers/test@onap.com
+newTopic=org.onap.dmaap.mr.junittestingtopic
+topicDescription=new topic creation
+partition=1
+replication=1
+txenabled=true
+deleteTopic=org.onap.dmaap.mr.deleteTopic
+
+
+#Admin
+SubContextPathGetAdminConsumerCache=/consumerCache
+SubContextPathDropAdminConsumerCache=/dropConsumerCache
+
+#Metrics
+SubContextPathGetMetrics=/metrics
+SubContextPathGetMetricsByName=/metrics/startTime
+
+#apikey
+SubContextPathGetApiKeys=/apiKeys
+SubContextPathGetCreateKeys=/create
+SubContextPathUpdateKeys=/apiKeys/
+SubContextPathDeleteteKeys=/apiKeys/
+SubContextPathGetOneKey=/apiKeys/
+
+Protocol =http
+
+
+#methodType
+MethodTypePost =POST
+MethodTypeGet=GET
+MethodTypePut=PUT
+MethodTypeDelete=DELETE
+message ={"id": "test@onap.com"}
+
+user=<user_id>
+password=<password>
+
+
+contenttype=application/json
+contenttypejson=application/json
\ No newline at end of file
diff --git a/src/main/resources/endpoint.properties b/src/main/resources/endpoint.properties
new file mode 100644 (file)
index 0000000..63742ef
--- /dev/null
@@ -0,0 +1,31 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+Latitude =37.66
+Longitude =-122.096839
+Version =1.0.0
+ServiceName =org.onap.dmaap.dmaap.demo1
+Environment =DEV
+RouteOffer =LA
+HostName =hltd436.hydc.sbc.com
+Port =8080
+ContextPath =/DMaaP/dmaaprest
+Protocol =http
\ No newline at end of file
diff --git a/src/main/resources/images/attLogo.gif b/src/main/resources/images/attLogo.gif
new file mode 100644 (file)
index 0000000..10f184c
Binary files /dev/null and b/src/main/resources/images/attLogo.gif differ
diff --git a/src/main/resources/images/att_vt_1cp_grd_rev.gif b/src/main/resources/images/att_vt_1cp_grd_rev.gif
new file mode 100644 (file)
index 0000000..034515c
Binary files /dev/null and b/src/main/resources/images/att_vt_1cp_grd_rev.gif differ
diff --git a/src/main/resources/kafka.properties b/src/main/resources/kafka.properties
new file mode 100644 (file)
index 0000000..876ffed
--- /dev/null
@@ -0,0 +1 @@
+key=admin_secret
\ No newline at end of file
diff --git a/src/main/resources/templates/hello.html b/src/main/resources/templates/hello.html
new file mode 100644 (file)
index 0000000..69a65ab
--- /dev/null
@@ -0,0 +1,9 @@
+#set($tab="")
+#parse("header.html")
+
+               <h1>Cambria API</h1>
+               <p>This is a Cambria API server, part of the Universal Event Broker service, a general purpose,
+               high-throughput pub/sub event routing system.</p>
+               <p>Please see <a href="http://sa2020.it.att.com:8888/sw/cambria/intro">the Cambria project</a> information page.
+
+#parse("footer.html")
index 4b1fb74..a8c976f 100644 (file)
@@ -20,7 +20,7 @@
 
  package org.onap.dmaap;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 import javax.ws.rs.BadRequestException;
 import javax.ws.rs.InternalServerErrorException;
@@ -28,19 +28,17 @@ import javax.ws.rs.NotAllowedException;
 import javax.ws.rs.NotAuthorizedException;
 import javax.ws.rs.NotFoundException;
 import javax.ws.rs.ServiceUnavailableException;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
 import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
-
 @RunWith(PowerMockRunner.class)
 @PowerMockIgnore("jdk.internal.reflect.*")
 public class DMaaPWebExceptionMapperTest {
diff --git a/src/test/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImplTest.java b/src/test/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImplTest.java
new file mode 100644 (file)
index 0000000..aa1620e
--- /dev/null
@@ -0,0 +1,117 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.dmf.mr.security;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Spy;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+@RunWith(MockitoJUnitRunner.class)
+public class DMaaPAAFAuthenticatorImplTest {
+
+    private MockHttpServletRequest request;
+    @Spy
+    private DMaaPAAFAuthenticatorImpl aafAuthorizer;
+
+    @Before
+    public void setUp() throws Exception {
+        request = new MockHttpServletRequest();
+    }
+
+
+    @Test
+    public void aafAuthentication_shouldSuccess_whenRequestIsConfiguredWithProperUserRole() {
+        //given
+        String userRole = "org.onap.dmaap.mr.topic|:topic.org.onap.dmaap.mr.aSimpleTopic|sub";
+        request.addUserRole(userRole);
+
+        //when
+        boolean isAuthorized = aafAuthorizer.aafAuthentication(request, userRole);
+
+        //then
+        assertTrue(isAuthorized);
+    }
+
+    @Test
+    public void aafAuthentication_shouldFail_whenRequestIsConfiguredWithProperUserRole() {
+        //given
+        String userRole = "org.onap.dmaap.mr.topic|:topic.org.onap.dmaap.mr.aSimpleTopic|pub";
+
+        //when
+        boolean isAuthorized = aafAuthorizer.aafAuthentication(request, userRole);
+
+        //then
+        assertFalse(isAuthorized);
+    }
+
+    @Test
+    public void getPermissionAsString_shouldReturnValidTopicPermission_whenTopicWithNamespace() throws Exception {
+        //given
+        String topicPermission = "org.onap.dmaap.mr.topic|:topic.org.onap.dmaap.mr.aSimpleTopic|pub";
+        String topicName = "org.onap.dmaap.mr.aSimpleTopic";
+        String operation = "pub";
+
+        //when
+        String resultPem = aafAuthorizer.aafPermissionString(topicName, operation);
+
+        //then
+        assertEquals(topicPermission, resultPem);
+    }
+
+    @Test
+    public void getPermissionAsString_shouldReturnValidTopicPermission_whenTopicWithoutNamespace() throws Exception {
+        //given
+        String topicPermission = "org.onap.dmaap.mr.topic|:topic.topicName|pub";
+        String topicName = "topicName";
+        String operation = "pub";
+
+        //when
+        String resultPem = aafAuthorizer.aafPermissionString(topicName, operation);
+
+        //then
+        assertEquals(topicPermission, resultPem);
+    }
+
+    @Test
+    public void getPermissionAsString_shouldReturnValidTopicPermission_whenNamespaceReadFromProperty() throws Exception {
+        //given
+        String topicPermission = "com.custom.ns.topic|:topic.topicName|pub";
+        String topicName = "topicName";
+        String operation = "pub";
+        String customNamespace = "com.custom.ns";
+        given(aafAuthorizer.readNamespaceFromProperties()).willReturn(customNamespace);
+
+        //when
+        String resultPem = aafAuthorizer.aafPermissionString(topicName, operation);
+
+        //then
+        assertEquals(topicPermission, resultPem);
+    }
+
+
+}
diff --git a/src/test/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImplTest.java b/src/test/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImplTest.java
new file mode 100644 (file)
index 0000000..068e9f9
--- /dev/null
@@ -0,0 +1,582 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.ConcurrentModificationException;
+import javax.servlet.http.HttpServletRequest;
+import joptsimple.internal.Strings;
+import org.apache.http.HttpStatus;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.mockito.Spy;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Consumer;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.backends.Publisher;
+import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.springframework.mock.web.MockHttpServletRequest;
+import org.springframework.mock.web.MockHttpServletResponse;
+
+@RunWith(MockitoJUnitRunner.class)
+public class EventsServiceImplTest {
+
+    private InputStream iStream = null;
+    private DMaaPContext dMaapContext = new DMaaPContext();
+    private DMaaPErrorMessages pErrorMessages = new DMaaPErrorMessages();
+    @Mock
+    private ConfigurationReader configurationReader;
+    @Mock
+    private Blacklist blacklist;
+    @Mock
+    private DMaaPAuthenticator<NsaSimpleApiKey> dmaaPAuthenticator;
+    @Mock
+    private NsaSimpleApiKey nsaSimpleApiKey;
+    @Mock
+    private DMaaPKafkaMetaBroker dmaapKafkaMetaBroker;
+    @Mock
+    private Topic createdTopic;
+    @Mock
+    private ConsumerFactory factory;
+    @Mock
+    private Consumer consumer;
+    @Mock
+    private Publisher publisher;
+    @Mock
+    private DMaaPCambriaLimiter limiter;
+    @Mock
+    private MetricsSet metrics;
+    @Spy
+    private EventsServiceImpl eventsService;
+
+
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
+    private MockHttpServletRequest request;
+
+
+    @Before
+    public void setUp() throws Exception {
+        MockitoAnnotations.initMocks(this);
+        String source = "source of my InputStream";
+        iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));
+
+        request = new MockHttpServletRequest();
+        MockHttpServletResponse response = new MockHttpServletResponse();
+        dMaapContext.setRequest(request);
+        dMaapContext.setResponse(response);
+        when(blacklist.contains(anyString())).thenReturn(false);
+        when(configurationReader.getfIpBlackList()).thenReturn(blacklist);
+        when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator);
+        dMaapContext.setConfigReader(configurationReader);
+        eventsService.setErrorMessages(pErrorMessages);
+        doReturn("100").when(eventsService).getPropertyFromAJSCmap("timeout");
+    }
+
+    @Test
+    public void getEvents_shouldFailOnAafAuthorization() throws Exception {
+        String topicPrefix = "org.onap.aaf.enforced";
+        String topicName = topicPrefix + ".topicName";
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+        when(eventsService.getPropertyFromAJSCmap("enforced.topic.name.AAF")).thenReturn(topicPrefix);
+        when(eventsService.isCadiEnabled()).thenReturn(true);
+
+        thrown.expect(DMaaPAccessDeniedException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_UNAUTHORIZED)));
+
+        eventsService.getEvents(dMaapContext, topicName, "CG1", "23");
+    }
+
+    @Test
+    public void getEvents_shouldFail_whenRemoteAddressIsBlacklisted() throws Exception {
+        String remoteIp = "10.154.17.115";
+        request.setRemoteAddr(remoteIp);
+        when(blacklist.contains(remoteIp)).thenReturn(true);
+        when(configurationReader.getfIpBlackList()).thenReturn(blacklist);
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_FORBIDDEN)));
+
+        eventsService.getEvents(dMaapContext, "testTopic", "CG1", "23");
+    }
+
+    @Test
+    public void getEvents_shouldFail_whenRequestedTopicNotExists() throws Exception {
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(null);
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_NOT_FOUND)));
+
+        eventsService.getEvents(dMaapContext, "testTopic", "CG1", "23");
+    }
+
+    @Test
+    public void getEvents_shouldFail_whenConsumerLockCannotBeAcquired() throws Exception {
+        //given
+        String topicName = "testTopic345";
+        String consumerGroup = "CG5";
+        String clientId = "13";
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(configurationReader.getfRateLimiter()).thenReturn(limiter);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+        when(configurationReader.getfConsumerFactory()).thenReturn(factory);
+        doThrow(new UnavailableException("Could not acquire consumer lock")).when(factory)
+            .getConsumerFor(eq(topicName), eq(consumerGroup), eq(clientId), anyInt(), anyString());
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_SERVICE_UNAVAILABLE)));
+
+        //when
+        eventsService.getEvents(dMaapContext, topicName, consumerGroup, clientId);
+
+        //then
+        verify(factory).getConsumerFor(eq(topicName), eq(consumerGroup), eq(clientId), anyInt(), anyString());
+
+    }
+
+    @Test
+    public void getEvents_shouldFail_whenBrokerServicesAreUnavailable() throws Exception {
+        String topicName = "testTopic";
+        String consumerGroup = "CG1";
+        String clientId = "23";
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+        when(configurationReader.getfConsumerFactory()).thenReturn(factory);
+
+        givenUserAuthorizedWithAAF(request, topicName, "sub");
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_SERVICE_UNAVAILABLE)));
+
+        //when
+        eventsService.getEvents(dMaapContext, topicName, consumerGroup, clientId);
+
+        //then
+        verify(factory).destroyConsumer(topicName, consumerGroup, clientId);
+    }
+
+    private void givenUserAuthorizedWithAAF(MockHttpServletRequest request, String topicName, String operation) {
+        String permission = "org.onap.dmaap.mr.topic|:topic." + topicName + "|" + operation;
+        request.addUserRole(permission);
+    }
+
+    @Test
+    public void getEvents_shouldHandleConcurrentModificationError() throws Exception {
+        String testTopic = "testTopic";
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(dmaapKafkaMetaBroker.getTopic(testTopic)).thenReturn(createdTopic);
+        when(configurationReader.getfRateLimiter()).thenThrow(new ConcurrentModificationException("Error occurred"));
+        givenUserAuthorizedWithAAF(request, testTopic, "sub");
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_CONFLICT)));
+
+        eventsService.getEvents(dMaapContext, "testTopic", "CG1", "23");
+    }
+
+    @Test
+    public void getEvents_shouldNotAuthorizeClient_whenSubscribingToMetricsTopic() throws Exception {
+        //given
+        HttpServletRequest permittedRequest = mock(HttpServletRequest.class);
+        when(permittedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        dMaapContext.setRequest(permittedRequest);
+        String metricsTopicName = "msgrtr.apinode.metrics.dmaap";
+        String consumerGroup = "CG5";
+        String clientId = "7";
+        givenConfiguredWithMocks(metricsTopicName);
+        when(factory.getConsumerFor(eq(metricsTopicName), eq(consumerGroup), eq(clientId), anyInt(), any()))
+            .thenReturn(consumer);
+        doNothing().when(eventsService).respondOkWithStream(eq(dMaapContext), any(CambriaOutboundEventStream.class));
+
+        //when
+        eventsService.getEvents(dMaapContext, metricsTopicName, consumerGroup, clientId);
+
+        //then
+        verify(eventsService).respondOkWithStream(eq(dMaapContext), any(CambriaOutboundEventStream.class));
+        verify(dmaaPAuthenticator, never()).authenticate(dMaapContext);
+        verify(permittedRequest, never()).isUserInRole(anyString());
+    }
+
+    @Test
+    public void getEvents_shouldNotAuthorizeClient_whenTopicNoteEnforcedWithAaf_andTopicHasNoOwnerSet()
+        throws Exception {
+        //given
+        String topicName = "someSimpleTopicName";
+        String consumerGroup = "CG5";
+        String clientId = "7";
+        HttpServletRequest permittedRequest = mock(HttpServletRequest.class);
+        when(permittedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        dMaapContext.setRequest(permittedRequest);
+        givenConfiguredWithMocks(topicName);
+        when(factory.getConsumerFor(eq(topicName), eq(consumerGroup), eq(clientId), anyInt(), any()))
+            .thenReturn(consumer);
+        doNothing().when(eventsService).respondOkWithStream(eq(dMaapContext), any(CambriaOutboundEventStream.class));
+        when(createdTopic.getOwner()).thenReturn(Strings.EMPTY);
+
+        //when
+        eventsService.getEvents(dMaapContext, topicName, consumerGroup, clientId);
+
+        //then
+        verify(eventsService).respondOkWithStream(eq(dMaapContext), any(CambriaOutboundEventStream.class));
+        verify(dmaaPAuthenticator, never()).authenticate(dMaapContext);
+        verify(permittedRequest, never()).isUserInRole(anyString());
+    }
+
+    @Test
+    public void getEvents_shouldFailDmaapAuthorization_whenTopicOwnerIsSet_andUserHasNoReadPermissionToTopic()
+        throws Exception {
+        //given
+        String topicName = "someSimpleTopicName";
+        String consumerGroup = "CG5";
+        String clientId = "7";
+        HttpServletRequest permittedRequest = mock(HttpServletRequest.class);
+        when(permittedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        dMaapContext.setRequest(permittedRequest);
+        givenConfiguredWithMocks(topicName);
+        when(createdTopic.getOwner()).thenReturn("SimpleTopicOwner");
+        when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);
+        doThrow(new AccessDeniedException("userName")).when(createdTopic).checkUserRead(nsaSimpleApiKey);
+
+        thrown.expect(AccessDeniedException.class);
+
+        //when
+        eventsService.getEvents(dMaapContext, topicName, consumerGroup, clientId);
+
+        //then
+        verify(createdTopic).checkUserRead(nsaSimpleApiKey);
+        verify(eventsService, never()).respondOkWithStream(eq(dMaapContext), any(CambriaOutboundEventStream.class));
+        verify(permittedRequest, never()).isUserInRole(anyString());
+    }
+
+
+    @Test
+    public void getEvents_shouldSuccessfullyRegisterConsumerToEventsStream_withAafAuthorization() throws Exception {
+        //given
+        String topicName = "testTopic";
+        String consumerGroup = "CG2";
+        String clientId = "6";
+        String messageLimit = "10";
+        String timeout = "25";
+        String meta = "yes";
+        String pretty = "on";
+        String cacheEnabled = "false";
+
+        givenConfiguredWithMocks(topicName);
+        givenConfiguredWithProperties(messageLimit, timeout, meta, pretty, cacheEnabled);
+        when(factory.getConsumerFor(eq(topicName), eq(consumerGroup), eq(clientId), anyInt(), anyString()))
+            .thenReturn(consumer);
+        givenUserAuthorizedWithAAF(request, topicName, "sub");
+
+        //when
+        eventsService.getEvents(dMaapContext, topicName, consumerGroup, clientId);
+
+        //then
+        ArgumentCaptor<CambriaOutboundEventStream> osWriter = ArgumentCaptor.forClass(CambriaOutboundEventStream.class);
+        verifyInvocationOrderForSuccessCase(topicName, consumerGroup, clientId, osWriter);
+        assertEventStreamProperties(osWriter.getValue(), messageLimit, timeout);
+    }
+
+    private void assertEventStreamProperties(CambriaOutboundEventStream stream, String messageLimit, String timeout) {
+        assertEquals(Integer.valueOf(messageLimit).intValue(), stream.getfLimit());
+        assertEquals(Integer.valueOf(timeout).intValue(), stream.getfTimeoutMs());
+        assertTrue(stream.isfWithMeta());
+        assertTrue(stream.isfPretty());
+    }
+
+    private void givenConfiguredWithProperties(String messageLimit, String timeout, String meta, String pretty,
+                                               String cacheEnabled) {
+        when(eventsService.getPropertyFromAJSCmap("meta")).thenReturn(meta);
+        when(eventsService.getPropertyFromAJSCmap("pretty")).thenReturn(pretty);
+        when(eventsService.getPropertyFromAJSCmap(ConsumerFactory.kSetting_EnableCache)).thenReturn(cacheEnabled);
+        request.addParameter("timeout", timeout);
+        request.addParameter("limit", messageLimit);
+    }
+
+    private void givenConfiguredWithMocks(String topicName) throws Exception {
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(configurationReader.getfRateLimiter()).thenReturn(limiter);
+        when(configurationReader.getfMetrics()).thenReturn(metrics);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+        when(configurationReader.getfConsumerFactory()).thenReturn(factory);
+        when(configurationReader.getfPublisher()).thenReturn(publisher);
+    }
+
+    private void verifyInvocationOrderForSuccessCase(String topicName, String consumerGroup, String clientId,
+                                                     ArgumentCaptor<CambriaOutboundEventStream> osWriter) throws Exception {
+
+        InOrder inOrder = Mockito.inOrder(configurationReader, factory, metrics, limiter, consumer, eventsService);
+        inOrder.verify(configurationReader).getfMetrics();
+        inOrder.verify(configurationReader).getfRateLimiter();
+        inOrder.verify(limiter).onCall(eq(topicName), eq(consumerGroup), eq(clientId), anyString());
+        inOrder.verify(factory).getConsumerFor(eq(topicName), eq(consumerGroup), eq(clientId), anyInt(), anyString());
+        inOrder.verify(eventsService).respondOkWithStream(eq(dMaapContext), osWriter.capture());
+        inOrder.verify(consumer).commitOffsets();
+        inOrder.verify(metrics).consumeTick(anyInt());
+        inOrder.verify(limiter).onSend(eq(topicName), eq(consumerGroup), eq(clientId), anyLong());
+        inOrder.verify(consumer).close();
+        inOrder.verifyNoMoreInteractions();
+    }
+
+    @Test
+    public void pushEvents_shouldFail_whenRemoteAddressIsBlacklisted() throws Exception {
+        String remoteIp = "10.132.64.112";
+        request.setRemoteAddr(remoteIp);
+        when(configurationReader.getfIpBlackList()).thenReturn(blacklist);
+        when(blacklist.contains(anyString())).thenReturn(true);
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_FORBIDDEN)));
+
+        eventsService.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00");
+    }
+
+
+
+
+    @Test
+    public void pushEvents_shouldFailDmaapAuthorization_whenTopicOwnerIsSet_andUserHasNoWritePermissionToTopic()
+        throws Exception {
+        //given
+        String topicName = "someSimpleTopicName";
+
+        HttpServletRequest permittedRequest = mock(HttpServletRequest.class);
+        when(permittedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        dMaapContext.setRequest(permittedRequest);
+        givenConfiguredWithMocks(topicName);
+        when(createdTopic.getOwner()).thenReturn("SimpleTopicOwner");
+        when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey);
+        doThrow(new AccessDeniedException("userName")).when(createdTopic).checkUserWrite(nsaSimpleApiKey);
+
+        thrown.expect(AccessDeniedException.class);
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        verify(createdTopic).checkUserWrite(nsaSimpleApiKey);
+        verify(eventsService, never()).respondOkWithStream(eq(dMaapContext), any(CambriaOutboundEventStream.class));
+        verify(permittedRequest, never()).isUserInRole(anyString());
+    }
+
+    @Test
+    public void pushEvents_shouldFailOnAafAuthorization_whenCadiIsEnabled_topicNameEnforced_andUserHasNoPermission()
+        throws Exception {
+        //given
+        String topicPrefix = "org.onap.aaf.enforced";
+        String topicName = topicPrefix + ".topicName";
+        String permission = "org.onap.dmaap.mr.topic|:topic." + topicName + "|pub";
+        HttpServletRequest deniedRequest = mock(HttpServletRequest.class);
+        when(deniedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+        when(eventsService.getPropertyFromAJSCmap("enforced.topic.name.AAF")).thenReturn(topicPrefix);
+        when(eventsService.isCadiEnabled()).thenReturn(true);
+        dMaapContext.setRequest(deniedRequest);
+
+        thrown.expect(DMaaPAccessDeniedException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_UNAUTHORIZED)));
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        verify(deniedRequest).isUserInRole(permission);
+    }
+
+
+    @Test
+    public void pushEvents_shouldPublishMessagesWithoutTransaction() throws Exception {
+        //given
+        String topicName = "topicWithoutTransaction";
+        givenConfiguredWithMocks(topicName);
+        doNothing().when(eventsService).respondOk(eq(dMaapContext), any(JSONObject.class));
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        verify(publisher).sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+        ArgumentCaptor<JSONObject> captor = ArgumentCaptor.forClass(JSONObject.class);
+        verify(eventsService).respondOk(eq(dMaapContext), captor.capture());
+        assertEquals(1, captor.getValue().getLong("count"));
+    }
+
+    @Test
+    public void pushEvents_shouldHandlePublisherError_whenPushWithoutTransaction() throws Exception {
+        //given
+        String topicName = "topicWithoutTransaction";
+        givenConfiguredWithMocks(topicName);
+        doThrow(new IOException()).when(publisher)
+            .sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_NOT_FOUND)));
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        verify(publisher).sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+        verify(eventsService, never()).respondOk(any(DMaaPContext.class), any(JSONObject.class));
+    }
+
+
+    @Test
+    public void pushEvents_shouldPublishMessagesWithTransaction() throws Exception {
+        //given
+        String topicPrefix = "org.onap.dmaap.mr";
+        String topicName = topicPrefix + ".topicWithTransaction";
+        givenConfiguredWithMocks(topicName);
+        when(eventsService.getPropertyFromAJSCmap("enforced.topic.name.AAF")).thenReturn(topicPrefix);
+        when(eventsService.isCadiEnabled()).thenReturn(true);
+        doNothing().when(eventsService).respondOk(eq(dMaapContext), any(JSONObject.class));
+
+        request.addUserRole("org.onap.dmaap.mr.topic|:topic." + topicName + "|pub");
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        verify(publisher).sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+        ArgumentCaptor<JSONObject> captor = ArgumentCaptor.forClass(JSONObject.class);
+        verify(eventsService).respondOk(eq(dMaapContext), captor.capture());
+        assertEquals(1, captor.getValue().getLong("count"));
+        assertFalse(captor.getValue().getString("transactionId").isEmpty());
+    }
+
+    @Test
+    public void pushEvents_shouldHandlePublisherError_whenPushWithTransaction() throws Exception {
+        //given
+        String topicPrefix = "org.onap.dmaap.mr";
+        String topicName = topicPrefix + ".topicWithTransaction";
+        givenConfiguredWithMocks(topicName);
+        when(eventsService.getPropertyFromAJSCmap("enforced.topic.name.AAF")).thenReturn(topicPrefix);
+        when(eventsService.isCadiEnabled()).thenReturn(true);
+        request.addUserRole("org.onap.dmaap.mr.topic|:topic." + topicName + "|pub");
+        doThrow(new IOException()).when(publisher)
+            .sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+
+        thrown.expect(CambriaApiException.class);
+        thrown.expectMessage(containsString(String.valueOf(HttpStatus.SC_NOT_FOUND)));
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        verify(publisher).sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+        verify(eventsService, never()).respondOk(any(DMaaPContext.class), any(JSONObject.class));
+    }
+
+    @Test
+    public void pushEvents_shouldNotPerformAnyAuthorization_whenPublishToMetricTopic() throws Exception {
+        //given
+        HttpServletRequest permittedRequest = mock(HttpServletRequest.class);
+        when(permittedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        dMaapContext.setRequest(permittedRequest);
+        String metricsTopicName = "msgrtr.apinode.metrics.dmaap";
+        givenConfiguredWithMocks(metricsTopicName);
+        doNothing().when(eventsService).respondOk(eq(dMaapContext), any(JSONObject.class));
+
+        //when
+        eventsService.pushEvents(dMaapContext, metricsTopicName, iStream, "5", "13:00:00");
+
+        //then
+        ArgumentCaptor<JSONObject> captor = ArgumentCaptor.forClass(JSONObject.class);
+        verify(publisher)
+            .sendBatchMessageNew(eq(metricsTopicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+        verify(eventsService).respondOk(eq(dMaapContext), captor.capture());
+        verify(permittedRequest, never()).isUserInRole(anyString());
+        verify(createdTopic, never()).checkUserWrite(any(NsaSimpleApiKey.class));
+        assertEquals(1, captor.getValue().getLong("count"));
+    }
+
+    @Test
+    public void pushEvents_shouldNotPerformAnyAuthorization_whenTopicHasNoOwner() throws Exception {
+        //given
+        HttpServletRequest permittedRequest = mock(HttpServletRequest.class);
+        when(permittedRequest.getHeaders(anyString())).thenReturn(Collections.<String>emptyEnumeration());
+        dMaapContext.setRequest(permittedRequest);
+        String topicName = "notEnforcedAafTopic";
+        givenConfiguredWithMocks(topicName);
+        doNothing().when(eventsService).respondOk(eq(dMaapContext), any(JSONObject.class));
+        when(createdTopic.getOwner()).thenReturn(null);
+
+        //when
+        eventsService.pushEvents(dMaapContext, topicName, iStream, "5", "13:00:00");
+
+        //then
+        ArgumentCaptor<JSONObject> captor = ArgumentCaptor.forClass(JSONObject.class);
+        verify(publisher).sendBatchMessageNew(eq(topicName), Mockito.<ArrayList<ProducerRecord<String, String>>>any());
+        verify(eventsService).respondOk(eq(dMaapContext), captor.capture());
+        verify(permittedRequest, never()).isUserInRole(anyString());
+        verify(createdTopic, never()).checkUserWrite(any(NsaSimpleApiKey.class));
+        assertEquals(1, captor.getValue().getLong("count"));
+    }
+
+}
diff --git a/src/test/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImplTest.java b/src/test/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImplTest.java
new file mode 100644 (file)
index 0000000..4424aa9
--- /dev/null
@@ -0,0 +1,673 @@
+/*
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Copyright (C) 2019 Nokia Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.dmf.mr.service.impl;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.contains;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import java.io.IOException;
+import java.nio.file.attribute.UserPrincipal;
+import java.security.Principal;
+import java.util.Arrays;
+import java.util.HashSet;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Spy;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.dmf.mr.metabroker.Broker1;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+
+
+@RunWith(MockitoJUnitRunner.class)
+public class TopicServiceImplTest {
+
+    private static final String TOPIC_CREATE_PEM = "org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:org.onap.dmaap.mr|create";
+    private static final String TOPIC_DELETE_PEM = "org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:org.onap.dmaap.mr|destroy";
+    private NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password");
+    private TopicBean topicBean;
+
+    @Spy
+    private TopicServiceImpl topicService;
+
+    @Mock
+    private DMaaPErrorMessages errorMessages;
+
+    @Mock
+    private DMaaPContext dmaapContext;
+
+    @Mock
+    private ConfigurationReader configReader;
+
+    @Mock
+    private ServletOutputStream oStream;
+
+    @Mock
+    private DMaaPAuthenticator<NsaSimpleApiKey> dmaaPAuthenticator;
+
+    @Mock
+    private HttpServletRequest httpServReq;
+
+    @Mock
+    private HttpServletResponse httpServRes;
+
+    @Mock
+    private DMaaPKafkaMetaBroker dmaapKafkaMetaBroker;
+
+    @Mock
+    private Topic createdTopic;
+
+    @Mock
+    private NsaAcl nsaAcl;
+
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
+    @Before
+    public void setUp() throws Exception {
+        configureSpyInstance();
+        topicService.setErrorMessages(errorMessages);
+
+        when(dmaapContext.getRequest()).thenReturn(httpServReq);
+    }
+
+    private void configureSpyInstance() throws Exception {
+        doReturn(user).when(topicService).getDmaapAuthenticatedUser(any(DMaaPContext.class));
+        doReturn(dmaapKafkaMetaBroker).when(topicService).getMetaBroker(any(DMaaPContext.class));
+        doNothing().when(topicService).respondOk(any(DMaaPContext.class),anyString());
+        doNothing().when(topicService).respondOk(any(DMaaPContext.class),any(JSONObject.class));
+        when(topicService.getPropertyFromAJSCbean("enforced.topic.name.AAF"))
+                .thenReturn("org.onap.dmaap.mr");
+        when(topicService.getPropertyFromAJSCmap("msgRtr.topicfactory.aaf"))
+                .thenReturn("org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:");
+    }
+
+    private void givenTopicBean(String topicName) {
+        topicBean = new TopicBean();
+        topicBean.setTopicName(topicName);
+    }
+
+
+    @Test
+    public void createTopic_shouldSkipAAFAuthorization_whenCadiIsEnabled_andTopicNameNotEnforced() throws Exception {
+        //given
+        String topicName = "UNAUTHENTICATED.PRH.REGISTRATION";
+
+        when(dmaapKafkaMetaBroker.createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(), anyBoolean()))
+                .thenReturn(createdTopic);
+
+        givenTopicBean(topicName);
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verify(dmaapKafkaMetaBroker).createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(),
+                anyBoolean());
+        verify(topicService).respondOk(eq(dmaapContext), any(JSONObject.class));
+        verify(httpServReq, never()).isUserInRole(TOPIC_CREATE_PEM);
+    }
+
+    @Test
+    public void createTopic_shouldSkipAAFAuthorization_whenCADIdisabled() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-2";
+        givenTopicBean(topicName);
+
+        when(dmaapKafkaMetaBroker.createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(), anyBoolean()))
+                .thenReturn(createdTopic);
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verify(dmaapKafkaMetaBroker).createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(),
+                anyBoolean());
+        verify(topicService).respondOk(eq(dmaapContext), any(JSONObject.class));
+        verify(httpServReq, never()).isUserInRole(TOPIC_CREATE_PEM);
+    }
+
+    @Test
+    public void createTopic_shouldPass_whenCADIisDisabled_andNoUserInDmaapContext() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-3";
+        givenTopicBean(topicName);
+
+        doReturn(null).when(topicService).getDmaapAuthenticatedUser(dmaapContext);
+        when(dmaapKafkaMetaBroker.createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(), anyBoolean()))
+                .thenReturn(createdTopic);
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verify(dmaapKafkaMetaBroker).createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(),
+                anyBoolean());
+        verify(topicService).respondOk(eq(dmaapContext), any(JSONObject.class));
+    }
+
+    @Test
+    public void createTopic_shouldPassWithAAFauthorization_whenCadiIsEnabled_andTopicNameWithEnforcedPrefix() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-4";
+        givenTopicBean(topicName);
+
+        Principal user = new UserPrincipal(){
+            @Override
+            public String getName(){
+                return "user";
+            }
+        };
+        when(topicService.isCadiEnabled()).thenReturn(true);
+        when(httpServReq.isUserInRole(TOPIC_CREATE_PEM)).thenReturn(true);
+        when(httpServReq.getUserPrincipal()).thenReturn(user);
+        when(dmaapKafkaMetaBroker.createTopic(eq(topicName), any(), eq("user"), anyInt(), anyInt(), anyBoolean()))
+                .thenReturn(createdTopic);
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verify(httpServReq).isUserInRole(TOPIC_CREATE_PEM);
+        verify(dmaapKafkaMetaBroker).createTopic(eq(topicName), any(), eq("user"), anyInt(), anyInt(), anyBoolean());
+        verify(topicService).respondOk(eq(dmaapContext), any(JSONObject.class));
+        verify(topicService, never()).getDmaapAuthenticatedUser(dmaapContext);
+    }
+
+    @Test
+    public void createTopic_shouldFailWithAAFauthorization_whenCadiIsEnabled_andTopicNameWithEnforcedPrefix() throws Exception {
+        //given
+        thrown.expect(DMaaPAccessDeniedException.class);
+
+        String topicName = "org.onap.dmaap.mr.topic-5";
+        givenTopicBean(topicName);
+
+        Principal user = new Principal(){
+            @Override
+            public String getName(){
+                return "user";
+            }
+        };
+        when(topicService.isCadiEnabled()).thenReturn(true);
+        when(httpServReq.isUserInRole(TOPIC_CREATE_PEM)).thenReturn(false);
+        when(httpServReq.getUserPrincipal()).thenReturn(user);
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verify(httpServReq).isUserInRole(TOPIC_CREATE_PEM);
+        verify(topicService, never()).getDmaapAuthenticatedUser(dmaapContext);
+        verifyZeroInteractions(dmaapKafkaMetaBroker);
+        verifyZeroInteractions(createdTopic);
+    }
+
+    @Test
+    public void createTopic_shouldThrowApiException_whenBrokerThrowsConfigDbException() throws Exception {
+        //given
+        thrown.expect(CambriaApiException.class);
+
+        String topicName = "org.onap.dmaap.mr.topic-6";
+        givenTopicBean(topicName);
+
+        when(dmaapKafkaMetaBroker.createTopic(eq(topicName), any(), any(), anyInt(), anyInt(), anyBoolean()))
+                .thenThrow(new ConfigDbException("fail"));
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verifyZeroInteractions(createdTopic);
+    }
+
+    @Test
+    public void createTopic_shouldFailGracefully_whenTopicExistsExceptionOccurs() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-7";
+        givenTopicBean(topicName);
+
+        when(dmaapKafkaMetaBroker.createTopic(eq(topicName), any(), anyString(), anyInt(), anyInt(), anyBoolean()))
+                .thenThrow(new Broker1.TopicExistsException("enfTopicNamePlusExtra"));
+
+        //when
+        topicService.createTopic(dmaapContext, topicBean);
+
+        //then
+        verifyZeroInteractions(createdTopic);
+    }
+
+    @Test
+    public void getValueOrDefault_shouldParseDeafultAndReturnIt_whenGivenValueIsZero() {
+        //given
+        int value = 0;
+        String defaultPropertyName = "propertyName";
+        when(topicService.getPropertyFromAJSCmap(defaultPropertyName)).thenReturn("6");
+
+        //when
+        int extracted = topicService.getValueOrDefault(value, defaultPropertyName);
+
+        //then
+        assertEquals(6, extracted);
+    }
+
+    @Test
+    public void getValueOrDefault_shouldReturnGivenValue_whenGreaterThanZero() {
+        //given
+        int value = 3;
+        String defaultPropertyName = "propertyName";
+
+        //when
+        int extracted = topicService.getValueOrDefault(value, defaultPropertyName);
+
+        //then
+        assertEquals(value, extracted);
+        verify(topicService, never()).getPropertyFromAJSCmap(defaultPropertyName);
+    }
+
+    @Test
+    public void getValueOrDefault_shouldParseDeafultAndReturnIt_whenGivenValueIsNegative() {
+        //given
+        int value = -3;
+        String defaultPropertyName = "propertyName";
+        when(topicService.getPropertyFromAJSCmap(defaultPropertyName)).thenReturn("6");
+
+        //when
+        int extracted = topicService.getValueOrDefault(value, defaultPropertyName);
+
+        //then
+        assertEquals(6, extracted);
+    }
+
+    @Test
+    public void getValueOrDefault_shouldReturnOne_whenGivenValueIsZero_andDefaultNotProvided() {
+        //given
+        int value = 0;
+        String defaultPropertyName = "propertyName";
+        when(topicService.getPropertyFromAJSCmap(defaultPropertyName)).thenReturn("");
+
+        //when
+        int extracted = topicService.getValueOrDefault(value, defaultPropertyName);
+
+        //then
+        assertEquals(1, extracted);
+    }
+
+    @Test
+    public void getValueOrDefault_shouldReturnOne_whenGivenValueIsZero_andDefaultNaN() {
+        //given
+        int value = 0;
+        String defaultPropertyName = "propertyName";
+        when(topicService.getPropertyFromAJSCmap(defaultPropertyName)).thenReturn("a");
+
+        //when
+        int extracted = topicService.getValueOrDefault(value, defaultPropertyName);
+
+        //then
+        assertEquals(1, extracted);
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testGetTopics_null_topic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException {
+
+        Assert.assertNotNull(topicService);
+        when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(null);
+
+        topicService.getTopic(dmaapContext, "topicName");
+    }
+
+    @Test
+    public void testGetTopics_NonNull_topic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(createdTopic);
+
+        when(createdTopic.getName()).thenReturn("topicName");
+        when(createdTopic.getDescription()).thenReturn("topicDescription");
+        when(createdTopic.getOwners()).thenReturn(new HashSet<>(Arrays.asList("user1,user2".split(","))));
+
+        when(createdTopic.getReaderAcl()).thenReturn(nsaAcl);
+        when(createdTopic.getWriterAcl()).thenReturn(nsaAcl);
+
+        topicService.getTopic(dmaapContext, "topicName");
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testGetPublishersByTopicName_nullTopic() throws DMaaPAccessDeniedException, CambriaApiException,
+            IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null);
+
+        topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name");
+
+    }
+
+    @Test
+    public void testGetPublishersByTopicName_nonNullTopic() throws DMaaPAccessDeniedException, CambriaApiException,
+            IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic);
+        when(createdTopic.getWriterAcl()).thenReturn(nsaAcl);
+        topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name");
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testGetConsumersByTopicName_nullTopic() throws DMaaPAccessDeniedException, CambriaApiException,
+            IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null);
+
+        topicService.getConsumersByTopicName(dmaapContext, "topicNamespace.name");
+
+    }
+
+    @Test
+    public void testGetConsumersByTopicName_nonNullTopic() throws DMaaPAccessDeniedException, CambriaApiException,
+            IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic);
+
+        when(createdTopic.getReaderAcl()).thenReturn(nsaAcl);
+
+        topicService.getConsumersByTopicName(dmaapContext, "topicNamespace.name");
+    }
+
+    @Test
+    public void testGetPublishersByTopicName() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic);
+
+        topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name");
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testGetPublishersByTopicNameError() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null);
+
+        topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name");
+    }
+
+    @Test
+    public void deleteTopic_shouldDeleteTopic_whenUserAuthorizedWithAAF_andTopicExists() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-9";
+        when(topicService.isCadiEnabled()).thenReturn(true);
+        when(httpServReq.isUserInRole(TOPIC_DELETE_PEM)).thenReturn(true);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+
+        //when
+        topicService.deleteTopic(dmaapContext, topicName);
+
+        //then
+        verify(httpServReq).isUserInRole(TOPIC_DELETE_PEM);
+        verify(topicService).respondOk(eq(dmaapContext), contains(topicName));
+        verify(topicService, never()).getDmaapAuthenticatedUser(dmaapContext);
+    }
+
+    @Test
+    public void deleteTopic_shouldSkipAAFauthorization_whenTopicNameNotEnforced() throws Exception {
+        //given
+        String topicName = "UNAUTHENTICATED.PRH.READY";
+        when(topicService.isCadiEnabled()).thenReturn(true);
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+
+        //when
+        topicService.deleteTopic(dmaapContext, topicName);
+
+        //then
+        verify(httpServReq, never()).isUserInRole(TOPIC_DELETE_PEM);
+        verify(topicService).respondOk(eq(dmaapContext), contains(topicName));
+    }
+
+    @Test
+    public void deleteTopic_shouldDeleteTopic_whenUserAuthorizedInContext_andTopicExists() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-10";
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(createdTopic);
+
+        //when
+        topicService.deleteTopic(dmaapContext, topicName);
+
+        //then
+        verify(httpServReq, never()).isUserInRole(TOPIC_DELETE_PEM);
+        verify(topicService).respondOk(eq(dmaapContext), contains(topicName));
+    }
+
+    @Test
+    public void deleteTopic_shouldNotDeleteTopic_whenUserNotAuthorizedByAAF() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-10";
+        thrown.expect(DMaaPAccessDeniedException.class);
+
+        when(topicService.isCadiEnabled()).thenReturn(true);
+        when(httpServReq.isUserInRole(TOPIC_DELETE_PEM)).thenReturn(false);
+
+        //when
+        topicService.deleteTopic(dmaapContext, topicName);
+
+        //then
+        verify(httpServReq).isUserInRole(TOPIC_DELETE_PEM);
+        verify(topicService, never()).respondOk(eq(dmaapContext), anyString());
+        verify(topicService, never()).getDmaapAuthenticatedUser(dmaapContext);
+    }
+
+    @Test
+    public void deleteTopic_shouldNotDeleteTopic_whenTopicDoesNotExist() throws Exception {
+        //given
+        String topicName = "org.onap.dmaap.mr.topic-10";
+        thrown.expect(TopicExistsException.class);
+
+        when(dmaapKafkaMetaBroker.getTopic(topicName)).thenReturn(null);
+
+        //when
+        topicService.deleteTopic(dmaapContext, topicName);
+
+        //then
+        verify(topicService, never()).respondOk(eq(dmaapContext), anyString());
+    }
+
+    @Test
+    public void testPermitConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.permitConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin");
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testPermitConsumerForTopic_nulltopic()
+            throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.permitConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin");
+    }
+
+    @Test
+    public void testdenyConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.denyConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin");
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testdenyConsumerForTopic_nulltopic()
+            throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.denyConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin");
+    }
+
+
+    @Test
+    public void testPermitPublisherForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.permitPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testPermitPublisherForTopic_nulltopic()
+            throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.permitPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");
+    }
+
+    @Test
+    public void testDenyPublisherForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.denyPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");
+        ;
+    }
+
+    @Test(expected = TopicExistsException.class)
+    public void testDenyPublisherForTopic_nulltopic()
+            throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null);
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.denyPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");
+        ;
+    }
+
+    @Test
+    public void testGetAllTopics() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.getAllTopics(dmaapContext);
+    }
+
+    @Test
+    public void testGetTopics() throws DMaaPAccessDeniedException, CambriaApiException, IOException,
+            TopicExistsException, JSONException, ConfigDbException, AccessDeniedException {
+
+        Assert.assertNotNull(topicService);
+
+        TopicBean topicBean = new TopicBean();
+        topicBean.setTopicName("enfTopicNamePlusExtra");
+
+        topicService.getTopics(dmaapContext);
+    }
+
+
+}
+
diff --git a/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java
new file mode 100644 (file)
index 0000000..3753722
--- /dev/null
@@ -0,0 +1,128 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.apiServer.metrics.cambria;
+
+
+import static org.junit.Assert.assertTrue;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import java.io.File;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DMaaPMetricsSenderTest {
+
+       @Before
+       public void setUp() throws Exception {
+               ClassLoader classLoader = getClass().getClassLoader();          
+               AJSCPropertiesMap.refresh(new File(classLoader.getResource("MsgRtrApi.properties").getFile()));
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       @Test
+       public void testSendPeriodically() {
+               
+               DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic");
+               try {
+                       sender.sendPeriodically(null, null, "testTopic");
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (NoClassDefFoundError e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               }                       
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSendPeriodically2() {
+               
+               DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic");
+               try {
+                       sender.sendPeriodically(null, null, "url", "testTopic", 2);
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               }       
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSend() {
+               
+               DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic");
+               try {
+                       sender.send();
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testRun() {
+               
+               DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic");
+               try {
+                       sender.run();
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..9758383
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.apiServer.metrics.cambria;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPMetricsSenderTest.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java
new file mode 100644 (file)
index 0000000..c1150e9
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.apiServer.metrics.cambria;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java
new file mode 100644 (file)
index 0000000..cb37071
--- /dev/null
@@ -0,0 +1,71 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+
+import static org.junit.Assert.assertTrue;
+
+public class CambriaApiExceptionTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetErrRes() {
+               
+               int status = 1;
+               String msg = "helloWorld";
+               CambriaApiException cambria = new CambriaApiException(status, msg);
+               
+               cambria.getErrRes();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSetErrRes() {
+               
+               int status = 1;
+               String msg = "helloWorld";
+               CambriaApiException cambria = new CambriaApiException(status, msg);
+               
+               cambria.setErrRes(new ErrorResponse(200, 0, "OK"));
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java
new file mode 100644 (file)
index 0000000..0a1af90
--- /dev/null
@@ -0,0 +1,50 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.cambria;
+
+import junit.framework.TestCase;
+import org.junit.Ignore;
+
+import java.util.HashMap;
+import java.util.Map;
+
+@Ignore
+public class CambriaApiTestCase extends TestCase {
+       
+       @Override
+       protected void setUp() throws Exception {
+               final Map<String, String> argMap = new HashMap<String, String> ();
+               
+               argMap.put("broker.type", "memory");
+               argMap.put("accounts.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory");
+               argMap.put("topic.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory");
+
+               //CambriaApiServer.start(argMap);
+               System.out.println("setUp() complete");
+       }
+       
+       public void tearDown() throws Exception {
+               System.out.println("tearDown() started");
+               //CambriaApiServer.stop();
+               System.out.println("tearDown() complete");
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java
new file mode 100644 (file)
index 0000000..39bcac0
--- /dev/null
@@ -0,0 +1,52 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiVersionInfo;
+
+import static org.junit.Assert.assertTrue;
+
+public class CambriaApiVersionInfoTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetVersion() {
+               CambriaApiVersionInfo.getVersion();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaRateLimiterTest.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaRateLimiterTest.java
new file mode 100644 (file)
index 0000000..51b617b
--- /dev/null
@@ -0,0 +1,74 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.cambria;
+
+import org.junit.Test;
+
+public class CambriaRateLimiterTest 
+{
+       @Test
+       public void testRateLimiter ()
+       {
+               /*final NsaTestClock clock = new NsaTestClock(1, false);
+
+               final String topic = "topic";
+               final String consumerGroup = "group";
+               final String clientId = "id";
+
+               final int window = 5;
+
+               // rate limit: 1 empty call/min avg over 5 minutes, with 10ms delay
+               final CambriaRateLimiter rater = new CambriaRateLimiter ( 1.0, window, 10 );
+               try
+               {
+                       // prime with a call to start rate window
+                       rater.onCall ( topic, consumerGroup, clientId );
+                       rater.onSend ( topic, consumerGroup, clientId, 1 );
+                       clock.addMs ( 1000*60*window );
+
+                       // rate should now be 0, with a good window
+                       for ( int i=0; i<4; i++ )
+                       {
+                               clock.addMs ( 1000*15 );
+                               rater.onCall ( topic, consumerGroup, clientId );
+                               rater.onSend ( topic, consumerGroup, clientId, 0 );
+                       }
+                       // rate is now 0.8 = 4 calls in last 5 minutes = 4/5 = 0.8
+
+                       clock.addMs ( 1000*15 );
+                       rater.onCall ( topic, consumerGroup, clientId );
+                       rater.onSend ( topic, consumerGroup, clientId, 0 );
+                               // rate = 1.0 = 5 calls in last 5 mins
+
+                       clock.addMs ( 1000 );
+                       rater.onCall ( topic, consumerGroup, clientId );
+                       rater.onSend ( topic, consumerGroup, clientId, 0 );
+                               // rate = 1.2 = 6 calls in last 5 mins, should fire
+
+                       fail ( "Should have thrown rate limit exception." );
+               }
+               catch ( CambriaApiException x )
+               {
+                       // good
+               }*/
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..bcac8d9
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({CambriaApiExceptionTest.class, CambriaApiVersionInfoTest.class,
+    CambriaApiTestCase.class, CambriaRateLimiterTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java
new file mode 100644 (file)
index 0000000..8465cbe
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java
new file mode 100644 (file)
index 0000000..2f57abf
--- /dev/null
@@ -0,0 +1,278 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.curator.CuratorZookeeperClient;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.WatcherRemoveCuratorFramework;
+import org.apache.curator.framework.api.CreateBuilder;
+import org.apache.curator.framework.api.CuratorListener;
+import org.apache.curator.framework.api.DeleteBuilder;
+import org.apache.curator.framework.api.ExistsBuilder;
+import org.apache.curator.framework.api.GetACLBuilder;
+import org.apache.curator.framework.api.GetChildrenBuilder;
+import org.apache.curator.framework.api.GetConfigBuilder;
+import org.apache.curator.framework.api.GetDataBuilder;
+import org.apache.curator.framework.api.ReconfigBuilder;
+import org.apache.curator.framework.api.RemoveWatchesBuilder;
+import org.apache.curator.framework.api.SetACLBuilder;
+import org.apache.curator.framework.api.SetDataBuilder;
+import org.apache.curator.framework.api.SyncBuilder;
+import org.apache.curator.framework.api.UnhandledErrorListener;
+import org.apache.curator.framework.api.transaction.CuratorMultiTransaction;
+import org.apache.curator.framework.api.transaction.CuratorTransaction;
+import org.apache.curator.framework.api.transaction.TransactionOp;
+import org.apache.curator.framework.imps.CuratorFrameworkState;
+import org.apache.curator.framework.listen.Listenable;
+import org.apache.curator.framework.schema.SchemaSet;
+import org.apache.curator.framework.state.ConnectionStateErrorPolicy;
+import org.apache.curator.framework.state.ConnectionStateListener;
+import org.apache.curator.utils.EnsurePath;
+import org.apache.zookeeper.Watcher;
+import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier;
+
+public class CuratorFrameworkImpl implements CuratorFramework {
+
+       @Override
+       public void blockUntilConnected() throws InterruptedException {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public boolean blockUntilConnected(int arg0, TimeUnit arg1) throws InterruptedException {
+               // TODO Auto-generated method stub
+               return false;
+       }
+
+       @Override
+       public ExistsBuilder checkExists() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public void clearWatcherReferences(Watcher arg0) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void close() {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public CreateBuilder create() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public DeleteBuilder delete() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public GetACLBuilder getACL() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public GetChildrenBuilder getChildren() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public Listenable<ConnectionStateListener> getConnectionStateListenable() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public Listenable<CuratorListener> getCuratorListenable() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public GetDataBuilder getData() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public String getNamespace() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public CuratorFrameworkState getState() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public Listenable<UnhandledErrorListener> getUnhandledErrorListenable() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public CuratorZookeeperClient getZookeeperClient() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public CuratorTransaction inTransaction() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public boolean isStarted() {
+               // TODO Auto-generated method stub
+               return false;
+       }
+
+       @Override
+       public EnsurePath newNamespaceAwareEnsurePath(String arg0) {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public CuratorFramework nonNamespaceView() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public SetACLBuilder setACL() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public SetDataBuilder setData() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public void start() {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public SyncBuilder sync() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public void sync(String arg0, Object arg1) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public CuratorFramework usingNamespace(String arg0) {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public ReconfigBuilder reconfig() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public GetConfigBuilder getConfig() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public CuratorMultiTransaction transaction() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public TransactionOp transactionOp() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public void createContainers(String path) throws Exception {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public RemoveWatchesBuilder watches() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public WatcherRemoveCuratorFramework newWatcherRemoveCuratorFramework() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public ConnectionStateErrorPolicy getConnectionStateErrorPolicy() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public QuorumVerifier getCurrentConfig() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public SchemaSet getSchemaSet() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public boolean isZk34CompatibilityMode() {
+               // TODO Auto-generated method stub
+               return false;
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..d299fdd
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({KafkaConsumerCacheTest.class, KafkaPublisherTest.class, Kafka011ConsumerTest.class,
+    KafkaLiveLockAvoider2Test.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/Kafka011ConsumerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/Kafka011ConsumerTest.java
new file mode 100644 (file)
index 0000000..10526c5
--- /dev/null
@@ -0,0 +1,91 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import static org.junit.Assert.assertNotNull;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.backends.kafka.Kafka011Consumer;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ AJSCPropertiesMap.class })
+public class Kafka011ConsumerTest {
+       
+               
+       @Mock
+       private KafkaConsumer<String, String> cc;
+       @Mock
+       private KafkaLiveLockAvoider2 klla;
+
+       @Before
+       public void setUp() throws Exception {
+               MockitoAnnotations.initMocks(this);
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       @Test
+       public void testKafka011Consumer() {
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "consumer.timeout")).thenReturn("10");
+               Kafka011Consumer consumer=null;
+               try {
+                        consumer= new Kafka011Consumer("topic", "group", "id", cc, klla)       ;
+                        consumer.commitOffsets();
+                        consumer.touch();
+                        consumer.setOffset(10);
+               } catch (Exception e) {
+                       
+               }
+               assertNotNull(consumer);
+               assertNotNull(consumer.getConsumer());
+               assertNotNull(consumer.getConsumerGroup());
+               assertNotNull(consumer.getConsumerId());
+               assertNotNull(consumer.getConsumerId());
+               assertNotNull(consumer.getCreateTimeMs());
+               assertNotNull(consumer.getLastAccessMs());
+               assertNotNull(consumer.getName());
+               assertNotNull(consumer.getOffset());
+               assertNotNull(consumer.getLastTouch());
+               
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java
new file mode 100644 (file)
index 0000000..457fff4
--- /dev/null
@@ -0,0 +1,250 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import static org.junit.Assert.*;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+import org.onap.dmaap.dmf.mr.backends.kafka.Kafka011Consumer;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ AJSCPropertiesMap.class })
+public class KafkaConsumerCacheTest {
+       private KafkaConsumerCache kafkaConsumerCache =null;
+       @Mock
+       private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;
+       @Mock
+       private MetricsSet fMetrics;
+
+       @Before
+       public void setUp() throws Exception {
+               MockitoAnnotations.initMocks(this);
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+
+       @Test
+       public void testSweep() {
+               kafkaConsumerCache = new KafkaConsumerCache();
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kSetting_TouchEveryMs")).thenReturn("100");
+               kafkaConsumerCache.sweep();
+
+       }
+       
+
+       // DOES NOT WORK
+       @Test
+       public void testStartCache() {
+
+               /*
+                * KafkaConsumerCache kafka = null;
+                * 
+                * try { kafka = new KafkaConsumerCache("123", null);
+                * 
+                * } catch (NoClassDefFoundError e) { try { kafka.startCache("DMAAP",
+                * null); } catch (NullPointerException e1) { // TODO Auto-generated
+                * catch block assertTrue(true); } catch (KafkaConsumerCacheException
+                * e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }
+                */
+
+               
+               new CuratorFrameworkImpl();
+               new MetricsSetImpl();
+               KafkaConsumerCache kafka=null;
+               try {
+                       kafka = new KafkaConsumerCache();
+                       kafka.setfApiId("1");
+                       kafka.startCache("DMAAP", null);
+               } catch (NoClassDefFoundError e) {
+
+               } catch (KafkaConsumerCacheException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testGetCuratorFramework() {
+
+               CuratorFramework curator = new CuratorFrameworkImpl();
+               new MetricsSetImpl();
+               try {
+
+               } catch (NoClassDefFoundError e) {
+
+                       KafkaConsumerCache.getCuratorFramework(curator);
+               }
+
+       }
+
+       /*
+        * @Test public void testStopCache() {
+        * 
+        * KafkaConsumerCache kafka = null; new CuratorFrameworkImpl(); new
+        * MetricsSetImpl(); try { kafka = new KafkaConsumerCache("123", null);
+        * kafka.stopCache(); } catch (NoClassDefFoundError e) {
+        * 
+        * }
+        * 
+        * }
+        */
+
+       @Test
+       public void testGetConsumerFor() {
+
+               KafkaConsumerCache kafka = null;
+
+               try {
+                       kafka = new KafkaConsumerCache();
+                       kafka.getConsumerFor("testTopic", "CG1", "23");
+               } catch (NoClassDefFoundError e) {
+
+               } catch (KafkaConsumerCacheException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testPutConsumerFor() {
+
+               Kafka011Consumer consumer = null;
+               KafkaConsumerCache kafka = null;
+
+               try {
+                       kafka = new KafkaConsumerCache();
+
+               } catch (NoClassDefFoundError e) {
+                       try {
+                               kafka.putConsumerFor("testTopic", "CG1", "23", consumer);
+                       } catch (NullPointerException e1) {
+                               // TODO Auto-generated catch block
+                               assertTrue(true);
+                       } catch (KafkaConsumerCacheException e1) {
+                               // TODO Auto-generated catch block
+                               e1.printStackTrace();
+                       }
+               }
+
+       }
+
+       @Test
+       public void testGetConsumers() {
+
+               KafkaConsumerCache kafka = null;
+
+               try {
+                       kafka = new KafkaConsumerCache();
+
+               } catch (NoClassDefFoundError e) {
+                       try {
+                               kafka.getConsumers();
+                       } catch (NullPointerException e1) {
+                               // TODO Auto-generated catch block
+                               assertTrue(true);
+                       }
+               }
+
+       }
+
+       @Test
+       public void testDropAllConsumers() {
+
+               KafkaConsumerCache kafka = null;
+               try {
+                       kafka = new KafkaConsumerCache();
+
+               } catch (NoClassDefFoundError e) {
+                       try {
+                               kafka.dropAllConsumers();
+                       } catch (NullPointerException e1) {
+                               // TODO Auto-generated catch block
+                               assertTrue(true);
+                       }
+               }
+
+       } 
+
+       @Test
+       public void testSignalOwnership() {
+
+               KafkaConsumerCache kafka = null;
+
+               try {
+                       kafka = new KafkaConsumerCache();
+                try {
+                       kafka.signalOwnership("testTopic", "CG1", "23");
+               } catch (KafkaConsumerCacheException e) {
+                       assertTrue(true);
+               }
+               } catch (NoClassDefFoundError e) {}
+
+               // 
+       }
+
+       @Test
+       public void testDropConsumer() {
+
+               KafkaConsumerCache kafka = null;
+
+               try {
+                       kafka = new KafkaConsumerCache();
+                       // kafka.dropConsumer("testTopic", "CG1", "23");
+               } catch (NoClassDefFoundError e) {
+                       try {
+                               kafka.dropConsumer("testTopic", "CG1", "23");
+                       } catch (NullPointerException e1) {
+                               // TODO Auto-generated catch block
+                               assertTrue(true);
+                       }
+               }
+
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaLiveLockAvoider2Test.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaLiveLockAvoider2Test.java
new file mode 100644 (file)
index 0000000..7f81641
--- /dev/null
@@ -0,0 +1,109 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.api.CreateBuilder;
+import org.apache.curator.framework.api.ExistsBuilder;
+import org.apache.curator.framework.api.GetChildrenBuilder;
+import org.apache.curator.framework.api.ProtectACLCreateModeStatPathAndBytesable;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+public class KafkaLiveLockAvoider2Test {
+       
+       @Mock
+       private CuratorFramework curatorFramework;
+       @Mock
+       private ExistsBuilder existsBuilder;
+       @Mock
+       private CreateBuilder createBuilder;
+       @Mock
+       private GetChildrenBuilder childrenBuilder;
+       @Mock
+       ProtectACLCreateModeStatPathAndBytesable<String> acl;
+       @InjectMocks
+       private KafkaLiveLockAvoider2 liveLockAvoider;
+       
+       public static final String ZNODE_ROOT = "/live-lock-avoid";
+       public static final String ZNODE_LOCKS = "/locks";
+       public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks";
+       
+       private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS;
+       private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS;
+       
+
+       @Before
+       public void setUp() throws Exception {
+               List<String> taskNodes= new ArrayList<String>();
+               taskNodes.add("appId");
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.when(acl.forPath(locksPath)).thenReturn(locksPath);
+               PowerMockito.when(acl.forPath(tasksPath)).thenReturn(tasksPath);
+               PowerMockito.when(createBuilder.creatingParentsIfNeeded()).thenReturn(acl);
+               PowerMockito.when(curatorFramework.create()).thenReturn(createBuilder);
+               PowerMockito.when(curatorFramework.checkExists()).thenReturn(existsBuilder);
+               PowerMockito.when(childrenBuilder.forPath(tasksPath)).thenReturn(taskNodes);
+               PowerMockito.when(curatorFramework.getChildren()).thenReturn(childrenBuilder);
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testUnlock(){
+               liveLockAvoider.init();
+               try {
+                       liveLockAvoider.unlockConsumerGroup("appId", "groupName");
+               } catch (Exception e) {
+                       assertTrue(true);
+               }
+       }
+       
+       @Test
+       public void testWatcher(){
+               try {
+                       liveLockAvoider.startNewWatcherForServer("appId", null);
+               } catch (Exception e) {
+                       assertTrue(true);
+               }
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java
new file mode 100644 (file)
index 0000000..7a0fe78
--- /dev/null
@@ -0,0 +1,74 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import static org.junit.Assert.assertTrue;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaPublisher;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ Utils.class })
+public class KafkaPublisherTest {
+
+       @Before
+       public void setUp() throws Exception {
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.mockStatic(Utils.class);
+               PowerMockito.when(Utils.isCadiEnabled()).thenReturn(true);
+
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testPublisherInit() {
+               
+               
+
+               try {
+                       try {
+                               KafkaPublisher kafkaPublisher = new KafkaPublisher(null);
+                       } catch (missingReqdSetting e) {
+                               assertTrue(true);
+                       }
+               } catch (LinkageError e) {
+                       assertTrue(true);
+               }
+
+       }
+
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java
new file mode 100644 (file)
index 0000000..ea36d86
--- /dev/null
@@ -0,0 +1,122 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import com.att.nsa.metrics.CdmMeasuredItem;
+import org.json.JSONObject;
+import org.onap.dmaap.dmf.mr.backends.MetricsSet;
+
+import java.util.List;
+import java.util.Map;
+
+public class MetricsSetImpl implements MetricsSet {
+
+       @Override
+       public List<? extends CdmMetricEntry> getEntries() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public CdmMeasuredItem getItem(String arg0) {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public Map<String, CdmMeasuredItem> getItems() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public void putItem(String arg0, CdmMeasuredItem arg1) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void removeItem(String arg0) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public int size() {
+               // TODO Auto-generated method stub
+               return 0;
+       }
+
+       @Override
+       public JSONObject toJson() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public void setupCambriaSender() {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void onRouteComplete(String name, long durationMs) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void publishTick(int amount) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void consumeTick(int amount) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void onKafkaConsumerCacheMiss() {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void onKafkaConsumerCacheHit() {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void onKafkaConsumerClaimed() {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public void onKafkaConsumerTimeout() {
+               // TODO Auto-generated method stub
+               
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java
new file mode 100644 (file)
index 0000000..38261e1
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.backends.kafka;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..c38be64
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.backends.memory;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({MemoryConsumerFactoryTest.class, MemoryMetaBrokerTest.class, MemoryQueueTest.class,
+    MemoryQueuePublisherTest.class, MessageLoggerTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java
new file mode 100644 (file)
index 0000000..ac07506
--- /dev/null
@@ -0,0 +1,80 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.memory;
+
+import static org.junit.Assert.assertTrue;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryConsumerFactory;
+
+public class MemoryConsumerFactoryTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetConsumerFor() {
+               MemoryConsumerFactory factory = new MemoryConsumerFactory(null);
+               
+               
+               String topic = "testTopic";
+               String consumerGroupId = "CG1";
+               String clientId = "C1";
+               String remoteHost="remoteHost";
+               int timeoutMs = 1000; 
+               factory.getConsumerFor(topic, consumerGroupId, clientId, timeoutMs,remoteHost);
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testDropCache() {
+               MemoryConsumerFactory factory = new MemoryConsumerFactory(null);
+       
+               factory.dropCache();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGetConsumers() {
+               MemoryConsumerFactory factory = new MemoryConsumerFactory(null);
+                
+               factory.getConsumers();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java
new file mode 100644 (file)
index 0000000..c4715cd
--- /dev/null
@@ -0,0 +1,90 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.memory;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+
+public class MemoryMetaBrokerTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetAllTopics() {
+               MemoryMetaBroker broker = new MemoryMetaBroker(null, null);
+               
+               broker.getAllTopics();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGeTopic() {
+               MemoryMetaBroker broker = new MemoryMetaBroker(null, null);
+       
+               broker.getTopic("testTopic");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testCreateTopic() {
+       
+       //uncommenting this gives a Null Pointer Exception      
+               
+               MemoryMetaBroker broker = new MemoryMetaBroker(null, null);
+       
+               int timeoutMs = 1000; 
+               try {
+                       broker.createTopic("testTopic","topic for testing", "ABCD123", 1,3, true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }  catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java
new file mode 100644 (file)
index 0000000..ff9bd9e
--- /dev/null
@@ -0,0 +1,101 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.memory;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueuePublisher;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class MemoryQueuePublisherTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSendBatchMessage() {
+               MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null);
+               
+               try {
+                       publisher.sendBatchMessageNew("testTopic", null);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSendMessage() {
+               MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null);
+
+               try {
+                       publisher.sendMessage("testTopic", null);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSendMessages() {
+               MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null);
+               
+
+               try {
+                       publisher.sendMessages("testTopic", null);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java
new file mode 100644 (file)
index 0000000..c5afa6d
--- /dev/null
@@ -0,0 +1,93 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.memory;
+
+import static org.junit.Assert.assertTrue;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue;
+
+
+public class MemoryQueueTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testCreateTopic() {
+               MemoryQueue queue = new MemoryQueue();
+                
+               queue.createTopic("testTopic");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       @Test
+       public void testRemoveTopic() {
+               MemoryQueue queue = new MemoryQueue();
+                
+               queue.removeTopic("testTopic");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testPut() {
+               MemoryQueue queue = new MemoryQueue();
+                
+               try {
+                       queue.put("testTopic", null);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGet() {
+               MemoryQueue queue = new MemoryQueue();
+                
+               queue.get("testTopic", "consumer");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java
new file mode 100644 (file)
index 0000000..bc2025a
--- /dev/null
@@ -0,0 +1,104 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.backends.memory;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.backends.memory.MessageLogger;
+
+
+public class MessageLoggerTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSendMessage() {
+               MessageLogger dropper = new MessageLogger();
+               
+               try {
+                       dropper.sendMessage("testTopic", null);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSendMessages() {
+               MessageLogger dropper = new MessageLogger();
+                
+               try {
+                       dropper.sendMessages("testTopic", null);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSendBatchMessage() {
+               MessageLogger dropper = new MessageLogger();
+               
+               try {
+                       dropper.sendBatchMessageNew("testTopic", null);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
+
+
+
+
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java
new file mode 100644 (file)
index 0000000..2f0a215
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.backends.memory;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java
new file mode 100644 (file)
index 0000000..e711107
--- /dev/null
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class ApiKeyBeanTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetEmail() {
+               
+               ApiKeyBean bean = new ApiKeyBean("user@onap.com", "testing bean");
+               
+               bean.getEmail();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testApiKeyBean(){
+               ApiKeyBean bean = new ApiKeyBean();
+               bean.setDescription("description");
+               bean.setEmail("email");
+               assertEquals("description", bean.getDescription());
+               assertEquals("email", bean.getEmail());
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java
new file mode 100644 (file)
index 0000000..0a36e1a
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+
+import static org.junit.Assert.assertTrue;
+
+public class ApiKeyBeanTest2 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetEmail() {
+               
+               ApiKeyBean bean = new ApiKeyBean("user@onap.com", "testing bean");
+               
+               bean.setEmail("user@onap.com");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java
new file mode 100644 (file)
index 0000000..7cc6164
--- /dev/null
@@ -0,0 +1,55 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ApiKeyBeanTest3 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetDescription() {
+               
+               ApiKeyBean bean = new ApiKeyBean("user@onap.com", "testing bean");
+               
+               bean.getDescription();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java
new file mode 100644 (file)
index 0000000..6f3a2f6
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+
+import static org.junit.Assert.assertTrue;
+
+public class ApiKeyBeanTest4 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetDescription() {
+               
+               ApiKeyBean bean = new ApiKeyBean("user@onap.com", "testing bean");
+               
+               bean.setDescription("new testing description");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java
new file mode 100644 (file)
index 0000000..e11ffde
--- /dev/null
@@ -0,0 +1,55 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ApiKeyBeanTest5 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetSharedSecret() {
+               
+               ApiKeyBean bean = new ApiKeyBean("user@onap.com", "testing bean");
+               
+               bean.getSharedSecret();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java
new file mode 100644 (file)
index 0000000..a847efa
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+
+import static org.junit.Assert.assertTrue;
+
+public class ApiKeyBeanTest6 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetKey() {
+               
+               ApiKeyBean bean = new ApiKeyBean("user@onap.com", "testing bean");
+               
+               bean.getKey();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java
new file mode 100644 (file)
index 0000000..f389ab1
--- /dev/null
@@ -0,0 +1,97 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class DMaaPCambriaLimiterTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetSleepMsForRate() {
+
+               assertEquals(1000, DMaaPCambriaLimiter.getSleepMsForRate(100));
+               assertEquals(0, DMaaPCambriaLimiter.getSleepMsForRate(0));
+
+       }
+
+       @Test
+       public void testOnCall() {
+
+               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(1, 2, 3);
+               try {
+                       limiter.onCall("testTopic", "ConsumerGroup1", "client2", "remoteHost");
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+       @Test
+       public void testOnCallError2() {
+
+               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(0, 2, 3, 1, 1);
+               try {
+                       limiter.onCall("testTopic", "ConsumerGroup1", "client2", "remoteHost");
+               } catch (CambriaApiException e) {
+                       assertTrue(false);
+               }
+
+       }
+
+       @Test(expected = CambriaApiException.class)
+       public void testOnCallError() throws CambriaApiException {
+
+               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(0.9, 2, 3, 1, 1);
+               limiter.onCall("testTopic", "ConsumerGroup1", "client2", "remoteHost");
+
+       }
+
+       @Test
+       public void testOnSend() {
+
+               DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(3, 3, 3);
+               limiter.onSend("testTopic", "consumerGroup1", "client1", 100);
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java
new file mode 100644 (file)
index 0000000..adbc064
--- /dev/null
@@ -0,0 +1,69 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.springframework.mock.web.MockHttpServletRequest;
+import org.springframework.mock.web.MockHttpSession;
+
+import static org.junit.Assert.*;
+
+public class DMaaPContextTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetBatchID() {
+               
+               DMaaPContext.getBatchID();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testDMaaPContext(){
+               
+               DMaaPContext context=new DMaaPContext();
+               context.setConsumerRequestTime("consumerRequestTime");
+               assertEquals("consumerRequestTime", context.getConsumerRequestTime());
+               MockHttpServletRequest request= new MockHttpServletRequest();
+               MockHttpSession session=new MockHttpSession();
+               request.setSession(session);
+               context.setRequest(request);
+               assertNotNull(context.getSession());
+               
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java
new file mode 100644 (file)
index 0000000..5bb4567
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+public class DMaaPContextTest2 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetRequest() {
+               
+               DMaaPContext context = new DMaaPContext();
+               
+               context.getRequest();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java
new file mode 100644 (file)
index 0000000..90867a3
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import static org.junit.Assert.assertTrue;
+
+public class DMaaPContextTest3 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetResponse() {
+               
+               DMaaPContext context = new DMaaPContext();
+               
+               context.getResponse();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java
new file mode 100644 (file)
index 0000000..d84a19c
--- /dev/null
@@ -0,0 +1,58 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+public class DMaaPContextTest4 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetSession() {
+               
+               DMaaPContext context = new DMaaPContext();
+               MockHttpServletRequest request = new MockHttpServletRequest();
+               context.setRequest(request);
+               
+               context.getSession();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java
new file mode 100644 (file)
index 0000000..565ef36
--- /dev/null
@@ -0,0 +1,55 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+public class DMaaPContextTest5 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetConfigReader() {
+               
+               DMaaPContext context = new DMaaPContext();
+               
+               context.getConfigReader();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java
new file mode 100644 (file)
index 0000000..b7f6e18
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+
+import static org.junit.Assert.assertTrue;
+
+public class DMaaPContextTest6 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetConsumerRequestTime() {
+               
+               DMaaPContext context = new DMaaPContext();
+               
+               context.getConsumerRequestTime();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaConsumerFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaConsumerFactoryTest.java
new file mode 100644 (file)
index 0000000..fd30359
--- /dev/null
@@ -0,0 +1,55 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *  
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.assertTrue;
+
+import org.junit.After;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaConsumerFactory;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.mr.cambria.embed.EmbedConfigurationReader;
+
+
+public class DMaaPKafkaConsumerFactoryTest {
+       
+       EmbedConfigurationReader embedConfigurationReader = new EmbedConfigurationReader();
+       
+       @After
+       public void tearDown() throws Exception {
+               embedConfigurationReader.tearDown();
+       }
+
+@Test  
+public void testConsumerFactory(){
+       
+       try {
+               ConfigurationReader configurationReader = embedConfigurationReader.buildConfigurationReader();
+               DMaaPKafkaConsumerFactory consumerFactory=(DMaaPKafkaConsumerFactory) configurationReader.getfConsumerFactory();
+               consumerFactory.getConsumerFor("topic", "consumerGroupName", "consumerId", 10, "remotehost");
+       } catch (Exception e) {
+               assertTrue(false);
+       }
+       assertTrue(true);
+}
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java
new file mode 100644 (file)
index 0000000..d59c839
--- /dev/null
@@ -0,0 +1,262 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.assertTrue;
+
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+import org.I0Itec.zkclient.ZkClient;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker.KafkaTopic;
+import org.onap.dmaap.dmf.mr.metabroker.Broker1.TopicExistsException;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ AdminClient.class })
+public class DMaaPKafkaMetaBrokerTest {
+
+       @InjectMocks
+       private DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker;
+       @Mock
+       private ZkClient fZk;
+       @Mock
+       private AdminClient fKafkaAdminClient;
+       @Mock
+       private AdminClient client;
+       @Mock
+       private ConfigDb configDb;
+       @Mock
+       ConfigPath fBaseTopicData;
+       @Mock
+       private ZkClient zkClient;
+       @Mock
+       Topic mockTopic;
+
+       @Before
+       public void setUp() {
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.mockStatic(AdminClient.class);
+               // PowerMockito.when(AdminClient.create (any(Properties.class)
+               // )).thenReturn(fKafkaAdminClient);
+
+               // PowerMockito.mockStatic(AdminUtils.class);
+               PowerMockito.when(configDb.parse("/topics")).thenReturn(fBaseTopicData);
+
+       }
+
+       @Test
+       public void testBrokercreate() {
+               DMaaPKafkaMetaBroker broker = new DMaaPKafkaMetaBroker();
+
+       }
+
+       @Test
+       public void testcreateTopicEntry() {
+               try {
+                       KafkaTopic kafkaTopic = new KafkaTopic("topics", configDb, fBaseTopicData);
+                       dMaaPKafkaMetaBroker.createTopicEntry("name", "desc", "owner", true);
+               } catch (Exception e) {
+                       assertTrue(true);
+               }
+
+       }
+
+       @Test
+       public void testGetAlltopics() {
+               try {
+                       dMaaPKafkaMetaBroker.getAllTopics();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testcreateTopic() {
+               try {
+                       dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generatee.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testcreateTopic_wrongPartition() {
+               try {
+
+                       dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 0, 1, true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generatee.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testcreateTopic_wrongReplica() {
+               try {
+
+                       dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 0, true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generatee.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testcreateTopic_error1() {
+               try {
+                       dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testcreateTopic_error2() {
+               try {
+                       dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+       }
+
+       @Test
+       public void testcreateTopic_error3() {
+               try {
+                       dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true);
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       assertTrue(true);
+
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testDeleteTopic() {
+               try {
+                       dMaaPKafkaMetaBroker.deleteTopic("testtopic");
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+
+       @Test
+       public void testDeleteTopic_error1() {
+               try {
+                       dMaaPKafkaMetaBroker.deleteTopic("testtopic");
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testDeleteTopic_error2() {
+               try {
+                       dMaaPKafkaMetaBroker.deleteTopic("testtopic");
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testDeleteTopic_error3() {
+               try {
+                       dMaaPKafkaMetaBroker.deleteTopic("testtopic");
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       assertTrue(true);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..cf48007
--- /dev/null
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.beans;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({ApiKeyBeanTest.class, ApiKeyBeanTest2.class, ApiKeyBeanTest3.class,
+    ApiKeyBeanTest4.class, ApiKeyBeanTest5.class, ApiKeyBeanTest6.class,
+    DMaaPCambriaLimiterTest.class, DMaaPContextTest.class, DMaaPContextTest2.class,
+    DMaaPContextTest3.class, DMaaPContextTest4.class, DMaaPContextTest5.class,
+    DMaaPContextTest6.class, LogDetailsTest.class, LogDetailsTest2.class,
+    LogDetailsTest3.class, LogDetailsTest4.class, LogDetailsTest5.class, LogDetailsTest6.class,
+    LogDetailsTest7.class, LogDetailsTest8.class, LogDetailsTest9.class, LogDetailsTest10.class,
+    LogDetailsTest11.class, LogDetailsTest12.class, LogDetailsTest13.class, LogDetailsTest14.class,
+    LogDetailsTest15.class, LogDetailsTest16.class, TopicBeanTest.class, TopicBeanTest2.class,
+    TopicBeanTest3.class, TopicBeanTest4.class, TopicBeanTest5.class, TopicBeanTest6.class,
+    TopicBeanTest7.class, TopicBeanTest8.class, TopicBeanTest9.class, TopicBeanTest10.class,})
+
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java
new file mode 100644 (file)
index 0000000..41a4c94
--- /dev/null
@@ -0,0 +1,85 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.*;
+
+public class LogDetailsTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetPublisherId() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getPublisherId();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGetPublisherLogDetails(){
+               
+               LogDetails details = new LogDetails();
+               details.setTotalMessageCount(1);
+               details.setConsumeTimestamp("02-27-1018");
+               details.setSubscriberGroupId("1");
+               details.setSubscriberId("1");
+               assertEquals(details.getTotalMessageCount(),1);
+               assertEquals(details.getConsumeTimestamp(),"02-27-1018");
+               assertEquals(details.getSubscriberId(),"1");
+               assertEquals(details.getSubscriberGroupId(),"1");
+       }
+       
+       @Test
+       public void testPublisherdetails(){
+               LogDetails details = new LogDetails();
+               assertNotNull(details.getPublisherLogDetails());
+               assertNull(details.getTransactionIdTs());
+               assertFalse(details.isTransactionEnabled());
+               assertEquals(details.getMessageLengthInBytes(),0);
+               assertNotNull(details.getPublishTimestamp());
+               assertNull(details.getMessageTimestamp());
+               assertNull(details.getMessageSequence());
+               assertNull(details.getMessageBatchId());
+               assertNull(details.getPublisherIp());
+               assertNull(details.getTopicId());
+               
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java
new file mode 100644 (file)
index 0000000..c80062f
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest10 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetPublishTimestamp() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getPublishTimestamp();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java
new file mode 100644 (file)
index 0000000..83b91cb
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest11 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetMessageLengthInBytes() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getMessageLengthInBytes();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java
new file mode 100644 (file)
index 0000000..54f1fe5
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest12 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetConsumeTimestamp() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getConsumeTimestamp();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java
new file mode 100644 (file)
index 0000000..7e9ea3e
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest13 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetTotalMessageCount() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getTotalMessageCount();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java
new file mode 100644 (file)
index 0000000..b3dd6b8
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest14 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testIsTransactionEnabled() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.isTransactionEnabled();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java
new file mode 100644 (file)
index 0000000..65d92da
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest15 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetTransactionIdTs() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getTransactionIdTs();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java
new file mode 100644 (file)
index 0000000..95cee27
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest16 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetServerIp() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getServerIp();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java
new file mode 100644 (file)
index 0000000..39104be
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest17 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetPublisherLogDetails() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getPublisherLogDetails();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java
new file mode 100644 (file)
index 0000000..2fc6825
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest18 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetPublisherLogDetails() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getPublisherLogDetails();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java
new file mode 100644 (file)
index 0000000..a0912b9
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest2 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetTransactionId() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getTransactionId();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java
new file mode 100644 (file)
index 0000000..c04a7d7
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest3 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetTopicId() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getTopicId();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java
new file mode 100644 (file)
index 0000000..c73c47a
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest4 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetSubscriberGroupId() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getSubscriberGroupId();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java
new file mode 100644 (file)
index 0000000..36136e4
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest5 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetSubscriberId() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getSubscriberId();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java
new file mode 100644 (file)
index 0000000..ca583f9
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest6 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetPublisherIp() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getPublisherIp();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java
new file mode 100644 (file)
index 0000000..da03335
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class LogDetailsTest7 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetMessageBatchId() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getMessageBatchId();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java
new file mode 100644 (file)
index 0000000..81e550f
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest8 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetMessageTimestamp() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getMessageTimestamp();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java
new file mode 100644 (file)
index 0000000..376548d
--- /dev/null
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+import static org.junit.Assert.assertTrue;
+
+public class LogDetailsTest9 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetMessageSequence() {
+               
+               LogDetails details = new LogDetails();
+               
+               details.getMessageSequence();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java
new file mode 100644 (file)
index 0000000..dc024b4
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.beans;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java
new file mode 100644 (file)
index 0000000..fa0a9ed
--- /dev/null
@@ -0,0 +1,75 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * 
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetTopicName() {
+               
+               TopicBean bean = new TopicBean();
+               
+               bean.getTopicName();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testTopicBean() {
+               
+               TopicBean bean = new TopicBean("topicName", "topicDescription", 1,1,true);
+               assertNotNull(bean);
+               
+       }
+       
+       @Test
+       public void testTopicBeanStter() {
+               
+               TopicBean bean = new TopicBean();
+               bean.setPartitionCount(1);
+               bean.setReplicationCount(1);
+               bean.setTopicDescription("topicDescription");
+               bean.setTopicName("topicName");
+               bean.setTransactionEnabled(true);
+               assertNotNull(bean);
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java
new file mode 100644 (file)
index 0000000..de4fa7a
--- /dev/null
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest10 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetTransactionEnabled() {
+               
+               TopicBean bean = new TopicBean();
+               bean.setTransactionEnabled(true);
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java
new file mode 100644 (file)
index 0000000..a1bbf7c
--- /dev/null
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+import static org.junit.Assert.assertTrue;
+
+public class TopicBeanTest2 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetTopicName() {
+               
+               TopicBean bean = new TopicBean();
+               bean.setTopicName("testTopic");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java
new file mode 100644 (file)
index 0000000..3227767
--- /dev/null
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+import static org.junit.Assert.assertTrue;
+
+public class TopicBeanTest3 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetPartitionCount() {
+               
+               TopicBean bean = new TopicBean();
+               bean.getPartitionCount();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java
new file mode 100644 (file)
index 0000000..3f5977a
--- /dev/null
@@ -0,0 +1,52 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest4 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetPartitionCount() {
+               
+               TopicBean bean = new TopicBean();
+               bean.setPartitionCount(8);
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java
new file mode 100644 (file)
index 0000000..8a172e8
--- /dev/null
@@ -0,0 +1,52 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest5 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetReplicationCount() {
+               
+               TopicBean bean = new TopicBean();
+               bean.getReplicationCount();
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java
new file mode 100644 (file)
index 0000000..edc6d39
--- /dev/null
@@ -0,0 +1,52 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest6 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetReplicationCount() {
+               
+               TopicBean bean = new TopicBean();
+               bean.setReplicationCount(3);
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java
new file mode 100644 (file)
index 0000000..9c3ab9d
--- /dev/null
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest7 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testIsTransactionEnabled() {
+               
+               TopicBean bean = new TopicBean();
+               bean.isTransactionEnabled();
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java
new file mode 100644 (file)
index 0000000..bb5f6da
--- /dev/null
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest8 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetTopicDescription() {
+               
+               TopicBean bean = new TopicBean();
+               bean.getTopicDescription();
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java
new file mode 100644 (file)
index 0000000..3be66e4
--- /dev/null
@@ -0,0 +1,52 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.beans;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.TopicBean;
+
+public class TopicBeanTest9 {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSetTopicDescription() {
+               
+               TopicBean bean = new TopicBean();
+               bean.setTopicDescription("testing topic");
+               
+       /*      String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));*/
+               
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
new file mode 100644 (file)
index 0000000..3e3fd28
--- /dev/null
@@ -0,0 +1,167 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+
+ package org.onap.dmaap.mr.cambria.embed;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.curator.framework.CuratorFramework;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaPublisher;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.AdminClientConfig;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaConsumerFactory;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.beans.DMaaPMetricsSet;
+import org.onap.dmaap.dmf.mr.beans.DMaaPZkClient;
+import org.onap.dmaap.dmf.mr.beans.DMaaPZkConfigDb;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPCuratorFactory;
+import org.onap.dmaap.dmf.mr.utils.PropertyReader;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+
+
+public class EmbedConfigurationReader {
+       private static final String DEFAULT_KAFKA_LOG_DIR = "/kafka_embedded";
+    public static final String TEST_TOPIC = "testTopic";
+    private static final int BROKER_ID = 0;
+    private static final int BROKER_PORT = 5000;
+    private static final String LOCALHOST_BROKER = String.format("localhost:%d", BROKER_PORT);
+
+    private static final String DEFAULT_ZOOKEEPER_LOG_DIR = "/zookeeper";
+    private static final int ZOOKEEPER_PORT = 2000;
+    private static final String ZOOKEEPER_HOST = String.format("localhost:%d", ZOOKEEPER_PORT);
+
+    private static final String groupId = "groupID";
+    String dir;
+    private  AdminClient fKafkaAdminClient;
+    KafkaLocal kafkaLocal;
+       
+       public void setUp() throws Exception {
+               
+               ClassLoader classLoader = getClass().getClassLoader();          
+               AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()));
+               
+               Properties kafkaProperties;
+        Properties zkProperties;
+
+        try {
+            //load properties
+               dir = new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()).getParent();
+            kafkaProperties = getKafkaProperties(dir + DEFAULT_KAFKA_LOG_DIR, BROKER_PORT, BROKER_ID);
+            zkProperties = getZookeeperProperties(ZOOKEEPER_PORT,dir + DEFAULT_ZOOKEEPER_LOG_DIR);
+
+            //start kafkaLocalServer
+            kafkaLocal = new KafkaLocal(kafkaProperties, zkProperties);
+            
+            Map<String, String> map = AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
+            map.put(CambriaConstants.kSetting_ZkConfigDbServers, ZOOKEEPER_HOST);
+            map.put("kafka.client.zookeeper", ZOOKEEPER_HOST);
+            map.put("kafka.metadata.broker.list", LOCALHOST_BROKER);
+            
+            DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());
+            
+            final Properties props = new Properties ();
+            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" );
+            props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret'");
+                props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");            
+            props.put("sasl.mechanism", "PLAIN");
+            fKafkaAdminClient = AdminClient.create ( props );
+            
+           // if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC))
+            // AdminUtils.createTopic(dMaaPZkClient, TEST_TOPIC, 3, 1, new Properties());
+            final NewTopic topicRequest = new NewTopic ( TEST_TOPIC, 3, new Integer(1).shortValue () );
+                        fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );
+            Thread.sleep(5000);
+        } catch (Exception e){
+            e.printStackTrace(System.out);
+        }      
+       }
+       
+       private static Properties getKafkaProperties(String logDir, int port, int brokerId) {
+        Properties properties = new Properties();
+        properties.put("port", port + "");
+        properties.put("broker.id", brokerId + "");
+        properties.put("log.dir", logDir);
+        properties.put("zookeeper.connect", ZOOKEEPER_HOST);
+        properties.put("default.replication.factor", "1");
+        properties.put("delete.topic.enable", "true");
+        properties.put("consumer.timeout.ms", -1);
+        return properties;
+    }
+       
+       private static Properties getZookeeperProperties(int port, String zookeeperDir) {
+        Properties properties = new Properties();
+        properties.put("clientPort", port + "");
+        properties.put("dataDir", zookeeperDir);
+        return properties;
+    }
+
+       public void tearDown() throws Exception {
+               DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());
+               if(fKafkaAdminClient!=null)
+               fKafkaAdminClient.deleteTopics(Arrays.asList(TEST_TOPIC));
+               //AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC);
+               //dMaaPZkClient.delete(dir + DEFAULT_KAFKA_LOG_DIR);
+               //dMaaPZkClient.delete(dir + DEFAULT_ZOOKEEPER_LOG_DIR);
+               kafkaLocal.stop();
+               FileUtils.cleanDirectory(new File(dir + DEFAULT_KAFKA_LOG_DIR));                
+       }
+
+
+       public ConfigurationReader buildConfigurationReader() throws Exception {
+               
+               setUp();
+               
+               PropertyReader propertyReader = new PropertyReader();
+               DMaaPMetricsSet dMaaPMetricsSet = new DMaaPMetricsSet(propertyReader);
+               DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(propertyReader);
+               DMaaPZkConfigDb dMaaPZkConfigDb = new DMaaPZkConfigDb(dMaaPZkClient, propertyReader);
+               CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader());
+               DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(dMaaPMetricsSet, curatorFramework,null);
+               MemoryQueue memoryQueue = new MemoryQueue();
+               MemoryMetaBroker memoryMetaBroker = new MemoryMetaBroker(memoryQueue, dMaaPZkConfigDb);
+               BaseNsaApiDbImpl<NsaSimpleApiKey> baseNsaApiDbImpl = new BaseNsaApiDbImpl<>(dMaaPZkConfigDb, new NsaSimpleApiKeyFactory());
+               DMaaPAuthenticator<NsaSimpleApiKey> dMaaPAuthenticator = new DMaaPAuthenticatorImpl<>(baseNsaApiDbImpl);
+               KafkaPublisher kafkaPublisher = new KafkaPublisher(propertyReader);
+               DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker = new DMaaPKafkaMetaBroker(propertyReader, dMaaPZkClient, dMaaPZkConfigDb);
+               
+               return new ConfigurationReader(propertyReader, 
+                               dMaaPMetricsSet, dMaaPZkClient, dMaaPZkConfigDb, kafkaPublisher, 
+                               curatorFramework, dMaaPKafkaConsumerFactory, dMaaPKafkaMetaBroker, 
+                               memoryQueue, memoryMetaBroker, baseNsaApiDbImpl, dMaaPAuthenticator);
+               
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java
new file mode 100644 (file)
index 0000000..9f3c05a
--- /dev/null
@@ -0,0 +1,58 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.embed;
+
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaServerStartable;
+
+import java.io.IOException;
+import java.util.Properties;
+
+
+public class KafkaLocal {
+       public KafkaServerStartable kafka;
+       public ZooKeeperLocal zookeeper;
+       
+       public KafkaLocal(Properties kafkaProperties, Properties zkProperties) throws IOException, InterruptedException{
+               KafkaConfig kafkaConfig = new KafkaConfig(kafkaProperties);
+               
+               //start local zookeeper
+               System.out.println("starting local zookeeper...");
+               zookeeper = new ZooKeeperLocal(zkProperties);
+               System.out.println("done");
+               
+               //start local kafka broker
+               kafka = new KafkaServerStartable(kafkaConfig);
+               System.out.println("starting local kafka broker...");
+               kafka.startup();
+               System.out.println("done");
+       }
+       
+       
+       public void stop(){
+               //stop kafka broker
+               System.out.println("stopping kafka...");
+               kafka.shutdown();
+               System.out.println("done");
+       }
+       
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java
new file mode 100644 (file)
index 0000000..3209845
--- /dev/null
@@ -0,0 +1,59 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.embed;
+
+import org.apache.zookeeper.server.ServerConfig;
+import org.apache.zookeeper.server.ZooKeeperServerMain;
+import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Properties;
+
+public class ZooKeeperLocal {
+       
+       ZooKeeperServerMain zooKeeperServer;
+       
+       public ZooKeeperLocal(Properties zkProperties) throws FileNotFoundException, IOException{
+               QuorumPeerConfig quorumConfiguration = new QuorumPeerConfig();
+               try {
+                   quorumConfiguration.parseProperties(zkProperties);
+               } catch(Exception e) {
+                   throw new RuntimeException(e);
+               }
+               zooKeeperServer = new ZooKeeperServerMain();
+               final ServerConfig configuration = new ServerConfig();
+               configuration.readFrom(quorumConfiguration);
+               
+               
+               new Thread() {
+                   public void run() {
+                       try {
+                           zooKeeperServer.runFromConfig(configuration);
+                       } catch (IOException e) {
+                           System.out.println("ZooKeeper Failed");
+                           e.printStackTrace(System.err);
+                       }
+                   }
+               }.start();
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java
new file mode 100644 (file)
index 0000000..6e4f75e
--- /dev/null
@@ -0,0 +1,58 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.exception;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.exception.DMaaPCambriaExceptionMapper;
+
+import static org.junit.Assert.assertTrue;
+
+public class DMaaPCambriaExceptionMapperTest {
+
+       @Before
+       public void setUp() throws Exception {
+               DMaaPCambriaExceptionMapper exception = new DMaaPCambriaExceptionMapper();
+       }
+
+       @After
+       public void tearDown() throws Exception {
+               
+       }
+
+       
+       @Test
+       public void testToResponse() {
+               
+               DMaaPCambriaExceptionMapper mapper = new DMaaPCambriaExceptionMapper();
+               
+               try {
+                       mapper.toResponse(null);
+               } catch (NullPointerException e) {
+                       assertTrue(true);
+               }
+               
+               
+       
+       }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java
new file mode 100644 (file)
index 0000000..a01ce90
--- /dev/null
@@ -0,0 +1,370 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.exception;
+
+import static org.junit.Assert.*;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DMaaPErrorMessagesTest {
+
+       @Before
+       public void setUp() throws Exception {
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+               
+       }
+
+       
+       @Test
+       public void testGetMsgSizeExceeds() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getMsgSizeExceeds();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetMsgSizeExceeds() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setMsgSizeExceeds("200");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetNotFound() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getNotFound();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetNotFound() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setNotFound("not found");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetServerUnav() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getServerUnav();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetServerUnav() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setServerUnav("server1");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetMethodNotAllowed() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getMethodNotAllowed();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetMethodNotAllowed() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setMethodNotAllowed("server2");
+               assertTrue(true);
+
+       }
+       
+
+       @Test
+       public void testGetBadRequest() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getBadRequest();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetBadRequest() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setBadRequest("badRequest");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetNwTimeout() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getNwTimeout();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetNwTimeout() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setNwTimeout("12:00:00");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetNotPermitted1() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getNotPermitted1();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetNotPermitted1() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setNotPermitted1("not permitted");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetNotPermitted2() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getNotPermitted2();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetNotPermitted2() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setNotPermitted2("not permitted2");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetTopicsfailure() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getTopicsfailure();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetTopicsfailure() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setTopicsfailure("failure");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetTopicDetailsFail() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getTopicDetailsFail();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetTopicDetailsFail() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setTopicDetailsFail("topic details fail");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetCreateTopicFail() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getCreateTopicFail();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetCreateTopicFail() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setCreateTopicFail("topic details fail");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetIncorrectJson() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getIncorrectJson();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetIncorrectJson() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setIncorrectJson("incorrect Json");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetDeleteTopicFail() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getDeleteTopicFail();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetDeleteTopicFail() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setDeleteTopicFail("delete tpic fail");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetConsumeMsgError() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getConsumeMsgError();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetConsumeMsgError() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setConsumeMsgError("consume message error");
+               assertTrue(true);
+
+       }
+       
+
+       @Test
+       public void testGetPublishMsgError() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getPublishMsgError();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetPublishMsgError() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setPublishMsgError("publish message error");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetPublishMsgCount() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getPublishMsgCount();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetPublishMsgCount() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setPublishMsgCount("200");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetAuthFailure() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getAuthFailure();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetAuthFailure() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setAuthFailure("auth failure");
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetTopicNotExist() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.getTopicNotExist();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetTopicNotExist() {
+               
+               DMaaPErrorMessages msg = new DMaaPErrorMessages();
+               msg.setTopicNotExist("toopic doesn't exist");
+               assertTrue(true);
+
+       }
+       
+       
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java
new file mode 100644 (file)
index 0000000..dfb8517
--- /dev/null
@@ -0,0 +1,58 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.exception;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.exception.DMaaPWebExceptionMapper;
+
+import static org.junit.Assert.assertTrue;
+
+public class DMaaPWebExceptionMapperTest {
+
+       @Before
+       public void setUp() throws Exception {
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+               
+       }
+
+       
+       @Test
+       public void testToResponse() {
+               
+               DMaaPWebExceptionMapper msg = new DMaaPWebExceptionMapper();
+               
+               try {
+                       msg.toResponse(null);
+               } catch (Exception e) {
+                       assertTrue(true);
+               }
+               
+       
+
+       }
+               
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java
new file mode 100644 (file)
index 0000000..b5aa75a
--- /dev/null
@@ -0,0 +1,144 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.exception;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
+
+import static org.junit.Assert.assertTrue;
+
+public class ErrorResponseTest {
+
+       @Before
+       public void setUp() throws Exception {
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+               
+       }
+
+       
+       @Test
+       public void testGetHttpStatusCode() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.getHttpStatusCode();
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void tesSGetHttpStatusCode() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.setHttpStatusCode(200);
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testGetMrErrorCode() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.getMrErrorCode();
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testSetMrErrorCode() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.setMrErrorCode(500);
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testGetErrorMessage() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.getErrorMessage();
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testSetErrorMessage() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.setErrorMessage("no error");
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testToString() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.toString();
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testGetErrMapperStr1() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.setHelpURL("/help");
+               assertTrue(true);
+               
+
+       }
+       
+       @Test
+       public void testGetErrMapperStr() {
+               
+               ErrorResponse resp = new ErrorResponse(200, 500, "no error");
+                               
+               resp.getHelpURL();
+               assertTrue(true);
+               
+
+       }
+       
+               
+               
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..dc0cf50
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.exception;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPCambriaExceptionMapperTest.class,
+    DMaaPErrorMessagesTest.class, DMaaPWebExceptionMapperTest.class, ErrorResponseTest.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java
new file mode 100644 (file)
index 0000000..77920c8
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.exception;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java
new file mode 100644 (file)
index 0000000..c97030e
--- /dev/null
@@ -0,0 +1,76 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+ package org.onap.dmaap.mr.cambria.listener;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.listener.CambriaServletContextListener;
+
+import static org.junit.Assert.assertTrue;
+
+public class CambriaServletContextListenerTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testContextDestroyed() {
+               CambriaServletContextListener listener = new CambriaServletContextListener();
+               try {
+                       listener.contextDestroyed(null);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testContextInitialized() {
+               CambriaServletContextListener listener = new CambriaServletContextListener();
+               
+               try {
+                       listener.contextInitialized(null);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java
new file mode 100644 (file)
index 0000000..ffc2535
--- /dev/null
@@ -0,0 +1,76 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.listener;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.listener.DME2EndPointLoader;
+
+import static org.junit.Assert.assertTrue;
+
+public class DME2EndPointLoaderTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testPublishEndPoints() {
+               DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
+               
+               
+               try {
+                       loader.publishEndPoints();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testUnPublishEndPoints() {
+               DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
+               
+               
+               try {
+                       loader.unPublishEndPoints();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..0dd7db5
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+package org.onap.dmaap.mr.cambria.listener;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DME2EndPointLoaderTest.class, CambriaServletContextListenerTest.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java
new file mode 100644 (file)
index 0000000..82078c6
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.listener;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java
new file mode 100644 (file)
index 0000000..94fdb6b
--- /dev/null
@@ -0,0 +1,71 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metabroker;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.metabroker.Broker;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+public class BrokerImpl implements Broker {
+
+       @Override
+       public List<Topic> getAllTopics() throws ConfigDbException {
+               // TODO Auto-generated method stub
+               Topic top = new TopicImplem();
+
+               List<Topic> list = new ArrayList<Topic>();
+
+               for (int i = 0; i < 5; i++) {
+                       top = new TopicImplem();
+                       list.add(top);
+
+               }
+
+               return null;
+
+       }
+
+       @Override
+       public Topic getTopic(String topic) throws ConfigDbException {
+               // TODO Auto-generated method stub
+               return new TopicImplem();
+       }
+
+       @Override
+       public Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
+                       boolean transactionEnabled) throws TopicExistsException, CambriaApiException {
+               // TODO Auto-generated method stub
+               return new TopicImplem(topic, description, ownerApiKey, transactionEnabled);
+       }
+
+       @Override
+       public void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException {
+               // TODO Auto-generated method stub
+               Topic top = new TopicImplem();
+
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java
new file mode 100644 (file)
index 0000000..19487df
--- /dev/null
@@ -0,0 +1,108 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+ package org.onap.dmaap.mr.cambria.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+
+import static org.junit.Assert.assertTrue;
+
+public class BrokerImplTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetOwners() {
+
+               try {
+                       new BrokerImpl().getAllTopics();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+               assertTrue(true);
+       }
+
+       @Test
+       public void testGetTopic() {
+
+               try {
+                       new BrokerImpl().getTopic("topicName");
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+               assertTrue(true);
+
+       }
+
+       @Test
+       public void testCreateTopic() {
+
+               try {
+                       new BrokerImpl().createTopic("topicName", "testing topic", "owner123", 3, 3, true);
+
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+               assertTrue(true);
+
+       }
+
+       @Test
+       public void testDeleteTopic() {
+
+               try {
+                       new BrokerImpl().deleteTopic("topicName");
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+               assertTrue(true);
+
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..b321af5
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.metabroker;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({BrokerImplTest.class, TopicImplemTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java
new file mode 100644 (file)
index 0000000..acf2184
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.metabroker;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java
new file mode 100644 (file)
index 0000000..fabe91c
--- /dev/null
@@ -0,0 +1,25 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metabroker;
+
+public class TopicImplTest {
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java
new file mode 100644 (file)
index 0000000..5aea70a
--- /dev/null
@@ -0,0 +1,140 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+
+import java.util.Set;
+
+public class TopicImplem implements Topic {
+       private String name, owner, description;
+       boolean isTransactionEnabled;
+       private Set<String> set = null;
+       private  NsaAcl readerAcl, writerAcl;
+       
+       public TopicImplem() {
+               name = getName();
+               owner = getOwner();
+               description = getDescription();
+               isTransactionEnabled = true;
+               readerAcl = getReaderAcl();
+               writerAcl = getWriterAcl();
+       }
+       
+       public TopicImplem(String topic, String description, String ownerApiKey, boolean transactionEnabled) {
+               
+               this.name = topic;
+               this.owner = ownerApiKey;
+               this.description = description;
+               isTransactionEnabled = transactionEnabled;
+               
+               
+       }
+       @Override
+       public Set<String> getOwners() {
+               // TODO Auto-generated method stub
+               for (int i = 0; i < 5; i++) {
+                       set.add("string" + (i + 1));
+               }
+               return set;
+       }
+
+       @Override
+       public String getName() {
+               // TODO Auto-generated method stub
+               return "testTopic";
+       }
+
+       @Override
+       public String getOwner() {
+               // TODO Auto-generated method stub
+               return "owner";
+       }
+
+       @Override
+       public String getDescription() {
+               // TODO Auto-generated method stub
+               return "topic for testing purposes";
+       }
+
+       @Override
+       public boolean isTransactionEnabled() {
+               // TODO Auto-generated method stub
+               return true;
+       }
+
+       @Override
+       public NsaAcl getReaderAcl() {
+               // TODO Auto-generated method stub
+               return new NsaAcl();
+       }
+
+       @Override
+       public NsaAcl getWriterAcl() {
+               // TODO Auto-generated method stub
+               return new NsaAcl();
+       }
+
+       @Override
+       public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
+               // TODO Auto-generated method stub
+               NsaApiKey u = user;
+       }
+
+       @Override
+       public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
+               // TODO Auto-generated method stub
+               
+               NsaApiKey u = user;
+       }
+
+       @Override
+       public void permitWritesFromUser(String publisherId, NsaApiKey asUser)
+                       throws AccessDeniedException, ConfigDbException {
+               // TODO Auto-generated method stub
+               String id = publisherId;
+               
+       }
+
+       @Override
+       public void denyWritesFromUser(String publisherId, NsaApiKey asUser)
+                       throws AccessDeniedException, ConfigDbException {
+               // TODO Auto-generated method stub
+               String id = publisherId;
+               
+       }
+
+       @Override
+       public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException, ConfigDbException {
+               // TODO Auto-generated method stub
+               String id = consumerId;
+       }
+
+       @Override
+       public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException, ConfigDbException {
+               // TODO Auto-generated method stub
+               String id = consumerId;
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java
new file mode 100644 (file)
index 0000000..c1c9489
--- /dev/null
@@ -0,0 +1,176 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class TopicImplemTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       
+       @Test
+       public void testGetOwners() {
+
+               assertNotNull(new TopicImplem().getOwner());
+
+       }
+       
+       @Test
+       public void testGetName() {
+
+               assertNotNull(new TopicImplem().getName());
+
+       }
+       
+       @Test
+       public void testGetOwner() {
+
+               assertNotNull(new TopicImplem().getOwner());
+
+       }
+       
+       @Test
+       public void testGetDescription() {
+
+               assertNotNull(new TopicImplem().getDescription());
+
+       }
+       
+       @Test
+       public void testIsTransactionEnabled() {
+
+               assertTrue(new TopicImplem().isTransactionEnabled());
+
+       }
+       
+       @Test
+       public void testGetReaderAcl() {
+               new TopicImplem().getReaderAcl();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testGetWriterAcl() {
+               new TopicImplem().getReaderAcl();
+               assertTrue(true);
+
+       }
+       
+
+       @Test
+       public void testCheckUserRead() {
+               try {
+                       new TopicImplem().checkUserRead(null);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testCheckUserWrite() {
+               try {
+                       new TopicImplem().checkUserWrite(null);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testPermitWritesFromUser() {
+               try {
+                       new TopicImplem().permitWritesFromUser("publisherId", null);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testDenyWritesFromUser() {
+               try {
+                       new TopicImplem().denyWritesFromUser("publisherId", null);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testPermitReadsByUser() {
+               try {
+                       new TopicImplem().permitReadsByUser("consumerId", null);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testDenyReadsByUser() {
+               try {
+                       new TopicImplem().denyReadsByUser("consumerId", null);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java
new file mode 100644 (file)
index 0000000..490c161
--- /dev/null
@@ -0,0 +1,93 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metrics.publisher;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisherUtility;
+
+import static org.junit.Assert.assertTrue;
+
+public class CambriaPublisherUtilityTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testEscape() {
+               
+               CambriaPublisherUtility utility = new CambriaPublisherUtility();
+               
+               utility.escape("testTopic");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testMakeUrl() {
+               
+               CambriaPublisherUtility utility = new CambriaPublisherUtility();
+               
+               utility.makeUrl("testTopic");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testMakeConsumerUrl() {
+               
+               CambriaPublisherUtility utility = new CambriaPublisherUtility();
+               
+               utility.makeConsumerUrl("testTopic", "CG1", "23");
+               assertTrue(true);
+               
+       }
+
+       @Test
+       public void testCreateHostsList() {
+               
+               CambriaPublisherUtility utility = new CambriaPublisherUtility();
+               
+               try {
+                       utility.createHostsList(null);
+               } catch (NullPointerException e) {
+                       assertTrue(true);
+               }
+
+                       
+       }
+       
+       @Test
+       public void testHostForString() {
+               
+               CambriaPublisherUtility utility = new CambriaPublisherUtility();
+               
+               utility.hostForString("hello");
+               assertTrue(true);
+               
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java
new file mode 100644 (file)
index 0000000..13e97e4
--- /dev/null
@@ -0,0 +1,166 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metrics.publisher;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;
+
+public class DMaaPCambriaClientFactoryTest {
+       
+       private Collection<String> hostSet;
+       
+       private String[] hostSetArray; 
+       @Before
+       public void setUp() throws Exception {
+               hostSet = new ArrayList<String>();
+               
+               hostSetArray = new String[10];
+               
+               for (int i = 0; i < 10; i++) {
+                       hostSet.add("host" + (i+1));
+                       hostSetArray[i] = "host" + (i+1);
+               }
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testCreateConsumer() {
+               
+               
+               
+               DMaaPCambriaClientFactory.createConsumer("hostList", "testTopic");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateConsumer2() {
+               
+               
+               try {
+                       DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic");
+               } catch (NullPointerException e) {
+                       assertTrue(true);
+               }
+               
+               
+       }
+       
+       @Test
+       public void testCreateConsumer3() {
+               
+               DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "filter");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateConsumer4() {
+               DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateConsumer5() {
+               
+               DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23", 100, 20);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateConsumer6() {
+               
+               
+               DMaaPCambriaClientFactory.createConsumer("hostList", "testTopic", "CG1", "23", 100, 20, "filter", "apikey", "apisecret");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateConsumer7() {
+               
+               DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23", 100, 20, "filter", "apikey", "apisecret");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateSimplePublisher() {
+               
+               DMaaPCambriaClientFactory.createSimplePublisher("hostList", "testTopic");
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateBatchingPublisher() {
+               
+               DMaaPCambriaClientFactory.createBatchingPublisher("hostList", "testTopic", 100, 50);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateBatchingPublisher2() {
+               
+               DMaaPCambriaClientFactory.createBatchingPublisher("hostList", "testTopic", 100, 50, true);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateBatchingPublisher3() {
+               
+               DMaaPCambriaClientFactory.createBatchingPublisher(hostSetArray, "testTopic", 100, 50, true);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testCreateBatchingPublisher4() {
+               
+               DMaaPCambriaClientFactory.createBatchingPublisher(hostSet, "testTopic", 100, 50, true);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void $testInject() {
+               
+               DMaaPCambriaClientFactory factory = new DMaaPCambriaClientFactory();
+               factory.$testInject(null);
+               assertTrue(true);
+               
+       }
+       
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..8dbcf12
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.metrics.publisher;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPCambriaClientFactoryTest.class, CambriaPublisherUtilityTest.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java
new file mode 100644 (file)
index 0000000..92e9f7d
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.metrics.publisher;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java
new file mode 100644 (file)
index 0000000..8127b42
--- /dev/null
@@ -0,0 +1,95 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metrics.publisher.impl;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.onap.dmaap.dmf.mr.metrics.publisher.impl.CambriaBaseClient;
+
+import org.json.JSONArray;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class CambriaBaseClientTest {
+       
+       private CambriaBaseClient client = null;
+       @Before
+       public void setUp() throws Exception {
+               
+               Collection<String> hosts = new ArrayList<String>();
+               
+               for (int i = 0; i < 5; i++) {
+                       hosts.add("host"+(i+1));
+               }
+               
+               
+               client = new CambriaBaseClient(hosts, "client1");
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testClose() {
+               client.close();
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGetLog() {
+               client.getLog();
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testLogTo() {
+               client.logTo(null);
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       public JSONArray getJSONArray() {
+               
+               String[] data = {"stringone", "stringtwo"};
+               JSONArray array = new JSONArray(Arrays.asList(data));
+
+               return array;
+       }
+       
+       @Test
+       public void testJsonArrayToSet() {
+               client.jsonArrayToSet(getJSONArray());
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java
new file mode 100644 (file)
index 0000000..241c5f6
--- /dev/null
@@ -0,0 +1,84 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metrics.publisher.impl;
+
+import static org.junit.Assert.*;
+
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.metrics.publisher.impl.Clock;
+
+public class ClockTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetIt() {
+               
+               Clock.getIt();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testNow() {
+               
+               Clock.now();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testNowImpl() {
+               
+               Clock clock = new Clock();
+               clock.nowImpl();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testRegister() {
+               
+               Clock clock = new Clock();
+               Clock.register(clock);
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java
new file mode 100644 (file)
index 0000000..2bc2a7f
--- /dev/null
@@ -0,0 +1,93 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+package org.onap.dmaap.mr.cambria.metrics.publisher.impl;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
+
+public class DMaaPCambriaConsumerImplTest {
+       
+       private DMaaPCambriaConsumerImpl consumer = null; 
+       @Before
+       public void setUp() throws Exception {
+               
+               Collection<String> hosts = new ArrayList<String>();
+               
+               for (int i = 0; i < 5; i++) {
+                       hosts.add("host"+(i+1));
+               }
+               consumer = new DMaaPCambriaConsumerImpl(hosts, "testTopic", "consumerGroup1", "1", 2000, 200, "hi", 
+                               "9AMFFNIZpusO54oG","6BY86UQcio2LJdgyU7Cwg5oQ");
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testStringToList() {
+               
+               List<String> response = DMaaPCambriaConsumerImpl.stringToList("Hello world, this is a test string");
+               assertNotNull(response);
+               
+               
+       }
+       
+       @Test
+       public void testFetch() {
+               
+               Iterable<String> response = null;
+               boolean flag = true;
+               try {
+                       response = consumer.fetch(200, 20);
+               } catch (IOException e) {
+                       flag = false;
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               if(flag) {
+                       assertNotNull(response);
+               } else {
+                       assertTrue(true);
+               }
+               
+       }
+       
+       
+       @Test
+       public void testCreateUrlPath() {
+       
+               String response = consumer.createUrlPath(200, 20);
+               assertNotNull(response);
+       }
+
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java
new file mode 100644 (file)
index 0000000..f936de5
--- /dev/null
@@ -0,0 +1,86 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.metrics.publisher.impl;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
+
+
+public class DMaaPCambriaSimplerBatchPublisherTest {
+       
+       private DMaaPCambriaSimplerBatchPublisher publisher = null;
+       @Before
+       public void setUp() throws Exception {
+               
+               Collection<String> hosts = new ArrayList<String>();
+               
+               for (int i = 0; i < 5; i++) {
+                       hosts.add("host"+(i+1));
+               }
+               
+               publisher = new DMaaPCambriaSimplerBatchPublisher.Builder().againstUrls(hosts).onTopic("testTopic")
+                                       .batchTo(200, 100).compress(true).build();                              
+
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSend() {
+               
+               publisher.send("hello", "test message");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testClose() {
+               
+               publisher.close();
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       @Test
+       public void testGetPendingMEssageCount() {
+               
+               publisher.getPendingMessageCount();
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..4bb1d15
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.metrics.publisher.impl;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPCambriaSimplerBatchPublisherTest.class, ClockTest.class,
+    CambriaBaseClientTest.class, DMaaPCambriaConsumerImplTest.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java
new file mode 100644 (file)
index 0000000..451be9e
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.metrics.publisher.impl;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java
new file mode 100644 (file)
index 0000000..48405e8
--- /dev/null
@@ -0,0 +1,73 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.resources;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.resources.CambriaEventSet;
+import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class CambriaEventSetTest {
+
+       private CambriaOutboundEventStream coes = null;
+       
+       @Before
+       public void setUp() throws Exception {
+       
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testNext() {
+               CambriaEventSet event = null;
+               String str = "contains text to be converted to InputStream";
+               
+               InputStream stream = new ByteArrayInputStream(str.getBytes());
+               try {
+                       event = new CambriaEventSet("application/cambria", stream, true, "hi");
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               
+               try {
+                       event.next();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               
+               assertTrue(true);
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java
new file mode 100644 (file)
index 0000000..9dfbcad
--- /dev/null
@@ -0,0 +1,129 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.resources;
+
+import org.json.JSONException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream;
+import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream.operation;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class CambriaOutboundEventStreamTest {
+
+       private CambriaOutboundEventStream coes = null;
+       
+       @Before
+       public void setUp() throws Exception {
+               coes = new CambriaOutboundEventStream.Builder(null).timeout(10).limit(1).filter(CambriaConstants.kNoFilter)
+                               .pretty(false).withMeta(true).build();
+               DMaaPContext ctx = new DMaaPContext();
+               //ctx.set...
+               coes.setDmaapContext(ctx);
+               coes.setTopic(null);
+               coes.setTransEnabled(true);
+               coes.setTopicStyle(true);
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetSentCount() {
+               int sentCount = coes.getSentCount();
+               assertTrue("Doesn't match, got " + sentCount, sentCount==0);;
+       }
+
+       @Test
+       public void testWrite() {
+               //fail("Not yet implemented");
+       }
+
+       @Test
+       public void testForEachMessage() {
+               try {
+                       coes.forEachMessage(new operation(){
+
+                               @Override
+                               public void onWait() throws IOException {
+                                       // TODO Auto-generated method stub
+                                       
+                               }
+
+                               @Override
+                               public void onMessage(int count, String msg, String transId, long offSet)
+                                               throws IOException, JSONException {
+                                       // TODO Auto-generated method stub
+                                       
+                               }
+                               
+                       });
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+               } 
+       }
+
+       @Test
+       public void testGetDmaapContext() {
+               DMaaPContext ctx = coes.getDmaapContext();
+               
+               assertNotNull(ctx);
+       }
+
+       @Test
+       public void testSetDmaapContext() {
+               DMaaPContext ctx = new DMaaPContext();
+               coes.setDmaapContext(ctx);
+               assertTrue(ctx.equals(coes.getDmaapContext()));
+       }
+
+       @Test
+       public void testGetTopic() {
+               coes.getTopic();
+               assertTrue(true);
+       }
+
+       @Test
+       public void testSetTopic() {
+               //fail("Not yet implemented");
+       }
+
+       @Test
+       public void testSetTopicStyle() {
+               coes.setTopicStyle(true);
+               assertTrue(true);
+       }
+
+       @Test
+       public void testSetTransEnabled() {
+               coes.setTransEnabled(true);
+               assertTrue(true);
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..5fd3143
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.resources;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({CambriaEventSetTest.class, CambriaOutboundEventStreamTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java
new file mode 100644 (file)
index 0000000..c701f10
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.resources;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java
new file mode 100644 (file)
index 0000000..ddfc435
--- /dev/null
@@ -0,0 +1,69 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.resources.streamReaders;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaJsonStreamReader;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import static org.junit.Assert.assertTrue;
+
+public class CambriaJsonStreamReaderTest {
+       
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       
+       @Test
+       public void testNext() {
+               
+               CambriaJsonStreamReader test = null;
+       
+               String source = "{'name': 'tester', 'id': '2'}";
+               InputStream stream = null;
+               try {
+                       stream = IOUtils.toInputStream(source, "UTF-8");
+                       test = new CambriaJsonStreamReader(stream,"hello");
+                       test.next();
+               } catch (IOException e1) {
+                       // TODO Auto-generated catch block
+                       e1.printStackTrace();
+               } catch (CambriaApiException e1) {
+                       e1.printStackTrace();
+               }
+               
+               assertTrue(true);
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java
new file mode 100644 (file)
index 0000000..ab58082
--- /dev/null
@@ -0,0 +1,71 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.resources.streamReaders;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaRawStreamReader;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import static org.junit.Assert.assertNotNull;
+
+public class CambriaRawStreamReaderTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testNext() {
+               
+               CambriaRawStreamReader test = null;
+               message msg = null;
+
+               String source = "{'name': 'tester', 'id': '2'}";
+               InputStream stream = null;
+               try {
+                       stream = IOUtils.toInputStream(source, "UTF-8");
+                       test = new CambriaRawStreamReader(stream,"hello");
+                       msg = test.next();
+               } catch (IOException e1) {
+                       // TODO Auto-generated catch block
+                       e1.printStackTrace();
+               } catch (CambriaApiException e1) {
+                       e1.printStackTrace();
+               }
+               
+               assertNotNull(msg);
+       
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java
new file mode 100644 (file)
index 0000000..6f54ac0
--- /dev/null
@@ -0,0 +1,72 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.resources.streamReaders;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaStreamReader;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+
+public class CambriaStreamReaderTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testNext() {
+               
+               CambriaStreamReader test = null;
+               
+
+               String source = "{'name': 'tester', 'id': '2', 'message': 'hello'}";
+               InputStream stream = null;
+               try {
+                       stream = IOUtils.toInputStream(source, "UTF-8");
+                       test = new CambriaStreamReader(stream);
+                       test.next();
+               } catch (IOException e1) {
+                       // TODO Auto-generated catch block
+                       e1.printStackTrace();
+               } catch (CambriaApiException e1) {
+                       e1.printStackTrace();
+               }
+               
+               assertTrue(true);
+       
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java
new file mode 100644 (file)
index 0000000..a8a4886
--- /dev/null
@@ -0,0 +1,69 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.resources.streamReaders;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaStreamReader;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import static org.junit.Assert.assertTrue;
+
+public class CambriaTextStreamReaderTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testNext() {
+               
+               CambriaStreamReader test = null;
+               
+               String source = "{'name': 'tester', 'id': '2', 'message': 'hello'}";
+               InputStream stream = null;
+               try {
+                       stream = IOUtils.toInputStream(source, "UTF-8");
+                       test = new CambriaStreamReader(stream);
+                       test.next();
+               } catch (IOException e1) {
+                       // TODO Auto-generated catch block
+                       e1.printStackTrace();
+               } catch (CambriaApiException e1) {
+                       e1.printStackTrace();
+               }
+               
+               assertTrue(true);
+       
+               
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..da7626b
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.resources.streamReaders;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({CambriaJsonStreamReaderTest.class, CambriaRawStreamReaderTest.class,
+    CambriaStreamReaderTest.class, CambriaTextStreamReaderTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java
new file mode 100644 (file)
index 0000000..22f7656
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.resources.streamReaders;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java
new file mode 100644 (file)
index 0000000..dd607cb
--- /dev/null
@@ -0,0 +1,123 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class DMaaPAuthenticatorImplTest {
+       
+       private MockHttpServletRequest request = null;
+       @Before
+       public void setUp() throws Exception {
+               //creating servlet object
+               request = new MockHttpServletRequest();
+               request.setServerName("www.example.com");
+               request.setRequestURI("/foo");
+               request.setQueryString("param1=value1&param");
+               String url = request.getRequestURL() + "?" + request.getQueryString(); 
+
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       /*@Test
+       public void testAuthenticate() {
+               
+               DMaaPAuthenticatorImpl<?> authenticator = new DMaaPAuthenticatorImpl(null);
+               
+               DMaaPContext ctx = new DMaaPContext();
+               authenticator.authenticate(ctx);
+               assertTrue(true);
+               
+       }*/
+       
+       
+       
+       /*@Test
+       public void testAafPermissionString() {
+               
+               DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl();
+               try {
+                       authenticator.aafPermissionString("testTopic", "admin");
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               
+               assertTrue(true);
+               
+       }*/
+       
+       @Test
+       public void testIsAuthentic() {
+               
+               DMaaPAuthenticatorImpl<?> authenticator = new DMaaPAuthenticatorImpl(null);
+               
+               authenticator.isAuthentic(request);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testQualify() {
+               
+               DMaaPAuthenticatorImpl<?> authenticator = new DMaaPAuthenticatorImpl(null);
+               
+               authenticator.qualify(request);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testAddAuthenticator() {
+               
+               DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null);
+               DMaaPAuthenticator authenticator2 = new DMaaPAuthenticatorImpl(null);
+               
+               authenticator.addAuthenticator(authenticator2);
+               assertTrue(true);
+               
+       }
+       
+       /*@Test
+       public void testGetAuthenticatedUser() {
+               
+               
+               DMaaPContext ctx = new DMaaPContext();
+               DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+               assertTrue(true);
+               
+       }
+       */
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..667ee04
--- /dev/null
@@ -0,0 +1,44 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.security;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImplTest;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPAAFAuthenticatorImplTest.class, DMaaPAuthenticatorImplTest.class,
+})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java
new file mode 100644 (file)
index 0000000..30a6387
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.security;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java
new file mode 100644 (file)
index 0000000..8612fca
--- /dev/null
@@ -0,0 +1,100 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security.impl;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.impl.DMaaPMechIdAuthenticator;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class DMaaPMechIdAuthenticatorTest {
+       
+       private MockHttpServletRequest request = null;
+       @Before
+       public void setUp() throws Exception {
+               //creating servlet object
+               request = new MockHttpServletRequest();
+               request.setServerName("www.example.com");
+               request.setRequestURI("/foo");
+               request.setQueryString("param1=value1&param");
+               String url = request.getRequestURL() + "?" + request.getQueryString(); 
+
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       
+       @Test
+       public void testQualify() {
+               
+               DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+               
+               authenticator.qualify(request);
+               assertTrue(true);
+               
+       }       
+       
+       @Test
+       public void testAuthenticate() {
+               
+               DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+               
+               DMaaPContext ctx = new DMaaPContext();
+               authenticator.authenticate(ctx);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testIsAuthentic() {
+               DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+               
+               authenticator.isAuthentic(request);
+               assertTrue(true);
+               
+       }
+       
+       
+       
+       @Test
+       public void testAddAuthenticator() {
+               
+               DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+               DMaaPMechIdAuthenticator authenticator2 = new DMaaPMechIdAuthenticator();
+               
+               authenticator.addAuthenticator(authenticator2);
+               assertTrue(true);
+               
+       }
+       
+       
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java
new file mode 100644 (file)
index 0000000..be3f5a2
--- /dev/null
@@ -0,0 +1,110 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security.impl;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class DMaaPOriginalUebAuthenticatorTest {
+       
+       private MockHttpServletRequest request = null;
+       @Before
+       public void setUp() throws Exception {
+               //creating servlet object
+               request = new MockHttpServletRequest();
+               request.setServerName("www.example.com");
+               request.setRequestURI("/foo");
+               request.setQueryString("param1=value1&param");
+               String url = request.getRequestURL() + "?" + request.getQueryString(); 
+
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       
+       @Test
+       public void testQualify() {
+               
+               DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
+               
+               authenticator.qualify(request);
+               assertTrue(true);
+               
+       }       
+       
+       @Test
+       public void testAuthenticate() {
+               
+               DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
+               
+               DMaaPContext ctx = new DMaaPContext();
+               authenticator.authenticate(ctx);
+               assertTrue(true);
+               
+       }
+       
+       @Test
+       public void testAuthenticate2() {
+               
+               DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
+               DMaaPContext ctx = new DMaaPContext();
+               authenticator.authenticate("google.com", "xHMDwk25kwkkyi26JH","Dec 16, 2016", "Dec/16/2016","123");
+               
+               
+       }
+       
+       @Test
+       public void testIsAuthentic() {
+               DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
+               
+               authenticator.isAuthentic(request);
+               assertTrue(true);
+               
+       }
+       
+       
+       
+       @Test
+       public void testAddAuthenticator() {
+               
+               DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
+               DMaaPOriginalUebAuthenticator authenticator2 = new DMaaPOriginalUebAuthenticator(null, 100);
+               
+               authenticator.addAuthenticator(authenticator2);
+               assertTrue(true);
+               
+       }
+       
+       
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..48cf300
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.security.impl;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPMechIdAuthenticatorTest.class, DMaaPOriginalUebAuthenticatorTest.class,
+})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java
new file mode 100644 (file)
index 0000000..57285f6
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.security.impl;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java
new file mode 100644 (file)
index 0000000..22db674
--- /dev/null
@@ -0,0 +1,183 @@
+/*-
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.impl.AdminServiceImpl;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class })
+public class AdminServiceImplemTest {
+
+       @InjectMocks
+       AdminServiceImpl adminServiceImpl;
+
+       @Mock
+       DMaaPContext dmaapContext;
+       @Mock
+       ConsumerFactory factory;
+
+       @Mock
+       ConfigurationReader configReader;
+       @Mock
+       Blacklist Blacklist;
+
+       @Before
+       public void setUp() throws Exception {
+
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class);
+               NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password");
+
+               PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader);
+               PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory);
+               PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist);
+
+               PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user);
+               PowerMockito.mockStatic(DMaaPResponseBuilder.class);
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       // ISSUES WITH AUTHENTICATION
+       @Test
+       public void testShowConsumerCache() {
+
+               try {
+                       adminServiceImpl.showConsumerCache(dmaapContext);
+               } catch (IOException | AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+       @Test
+       public void testDropConsumerCache() {
+
+               try {
+                       adminServiceImpl.dropConsumerCache(dmaapContext);
+               } catch (IOException | AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+       @Test
+       public void testGetBlacklist() {
+
+               try {
+                       adminServiceImpl.getBlacklist(dmaapContext);
+               } catch (IOException | AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+       @Test
+       public void testAddToBlacklist() {
+
+               try {
+                       adminServiceImpl.addToBlacklist(dmaapContext, "120.120.120.120");
+               } catch (IOException | AccessDeniedException | ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+       @Test
+       public void testRemoveFromBlacklist() {
+
+               try {
+                       adminServiceImpl.removeFromBlacklist(dmaapContext, "120.120.120.120");
+               } catch (IOException | AccessDeniedException | ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java
new file mode 100644 (file)
index 0000000..b218185
--- /dev/null
@@ -0,0 +1,333 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.impl.ApiKeysServiceImpl;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.dmf.mr.utils.Emailer;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class })
+public class ApiKeysServiceImplTest {
+       
+       @InjectMocks
+       ApiKeysServiceImpl service;
+
+       @Mock
+       DMaaPContext dmaapContext;
+       @Mock
+       ConsumerFactory factory;
+
+       @Mock
+       ConfigurationReader configReader;
+       @Mock
+       Blacklist Blacklist;
+       @Mock
+       Emailer emailer;
+
+       @Before
+       public void setUp() throws Exception {
+
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class);
+               NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password");
+
+               PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader);
+               PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory);
+               PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist);
+               
+               PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb);
+               PowerMockito.when(configReader.getSystemEmailer()).thenReturn(emailer);
+               PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user);
+               PowerMockito.mockStatic(DMaaPResponseBuilder.class);
+       
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       
+       @Test
+       public void testGetAllApiKeys() {
+               
+                service = new ApiKeysServiceImpl();
+               try {
+                       service.getAllApiKeys(dmaapContext);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetApiKey() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       service.getApiKey(dmaapContext, "testkey");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+               }
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetApiKey_error() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       service.getApiKey(dmaapContext, "k35Hdw6Sde");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+        
+       }
+       
+       @Test
+       public void testCreateApiKey() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       service.createApiKey(dmaapContext, new ApiKeyBean("test@onap.com", "testing apikey bean"));
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (KeyExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch(NoClassDefFoundError e) {
+                       
+               }
+                assertTrue(true);
+       }
+       
+       @Test
+       public void testUpdateApiKey() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       
+                       service.updateApiKey(dmaapContext, "admin", new ApiKeyBean("test@onapt.com", "testing apikey bean"));
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+                assertTrue(true);
+        
+       }
+       @Test
+       public void testUpdateApiKey_error() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       
+                       service.updateApiKey(dmaapContext, null, new ApiKeyBean("test@onapt.com", "testing apikey bean"));
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                        assertTrue(true);
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+        
+       }
+       
+       @Test
+       public void testDeleteApiKey() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       
+                       service.deleteApiKey(dmaapContext, null);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+        
+       }
+       
+       @Test
+       public void testDeleteApiKey_error() {
+               
+               ApiKeysServiceImpl service = new ApiKeysServiceImpl();
+               try {
+                       
+                       service.deleteApiKey(dmaapContext, "admin");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+        
+       }
+       
+       NsaApiDb<NsaSimpleApiKey> fApiKeyDb= new NsaApiDb<NsaSimpleApiKey>() {
+               
+               
+               Set<String> keys = new HashSet<>(Arrays.asList("testkey","admin"));
+               
+               
+               @Override
+               public NsaSimpleApiKey createApiKey(String arg0, String arg1)
+                               throws KeyExistsException, ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return new NsaSimpleApiKey(arg0, arg1);
+               }
+
+               @Override
+               public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return false;
+               }
+
+               @Override
+               public boolean deleteApiKey(String arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return false;
+               }
+
+               @Override
+               public Map<String, NsaSimpleApiKey> loadAllKeyRecords() throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return null;
+               }
+
+               @Override
+               public Set<String> loadAllKeys() throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       
+                       return keys ;
+               }
+
+               @Override
+               public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException {
+                       if(!keys.contains(arg0)){
+                               return null;
+                       }
+                       return new NsaSimpleApiKey(arg0, "password");
+               }
+
+               @Override
+               public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       
+               }
+       };
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java
new file mode 100644 (file)
index 0000000..81a5cf7
--- /dev/null
@@ -0,0 +1,147 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import com.att.nsa.configs.ConfigDbException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.service.impl.BaseTransactionDbImpl;
+import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObjDB.KeyExistsException;
+
+import static org.junit.Assert.assertTrue;
+
+public class BaseTransactionDbImplTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       @Test
+       public void testCreateTransactionObj() {
+               
+               
+               try {
+                       
+                       BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null);
+                       service.createTransactionObj("transition");
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (KeyExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testSaveTransactionObj() {
+               
+               
+               try {
+                       
+                       BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null);
+                       service.saveTransactionObj(null);
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testLoadTransactionObj() {
+               
+               try {
+                       
+                       BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null);
+                       service.loadTransactionObj("34");
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+
+       @Test
+       public void testLoadAllTransactionObjs() {
+               
+               try {
+                       
+                       BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null);
+                       service.loadAllTransactionObjs();
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+
+
+       
+       
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..5ba8ec2
--- /dev/null
@@ -0,0 +1,47 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.service.impl;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+import org.onap.dmaap.dmf.mr.service.impl.EventsServiceImplTest;
+import org.onap.dmaap.dmf.mr.service.impl.TopicServiceImplTest;
+
+@RunWith(Suite.class)
+@SuiteClasses({UIServiceImplTest.class, AdminServiceImplemTest.class, ApiKeysServiceImplTest.class,
+    ShowConsumerCacheTest.class, TopicServiceImplTest.class, TransactionServiceImplTest.class,
+    MMServiceImplTest.class,
+    BaseTransactionDbImplTest.class, MetricsServiceImplTest.class, EventsServiceImplTest.class})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java
new file mode 100644 (file)
index 0000000..0f3015f
--- /dev/null
@@ -0,0 +1,382 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import static org.junit.Assert.assertTrue;
+
+import com.att.ajsc.beans.PropertiesMapBean;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.ConcurrentModificationException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Matchers;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
+import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.impl.MMServiceImpl;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.dmf.mr.utils.Emailer;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.springframework.mock.web.MockHttpServletRequest;
+import org.springframework.mock.web.MockHttpServletResponse;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class, PropertiesMapBean.class,
+               AJSCPropertiesMap.class })
+public class MMServiceImplTest {
+
+       @InjectMocks
+       MMServiceImpl service;
+
+       @Mock
+       DMaaPContext dmaapContext;
+       @Mock
+       ConsumerFactory factory;
+       @Mock
+       private DMaaPErrorMessages errorMessages;
+       @Mock
+       ConfigurationReader configReader;
+       @Mock
+       Blacklist Blacklist;
+       @Mock
+       Emailer emailer;
+       @Mock
+       DMaaPKafkaMetaBroker dmaapKafkaMetaBroker;
+       @Mock
+       Topic metatopic;
+
+       @Before
+       public void setUp() throws Exception {
+
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class);
+               NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password");
+
+               PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader);
+               PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory);
+               PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist);
+
+               PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb);
+               PowerMockito.when(configReader.getSystemEmailer()).thenReturn(emailer);
+               PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user);
+               PowerMockito.mockStatic(DMaaPResponseBuilder.class);
+
+               MockHttpServletRequest request = new MockHttpServletRequest();
+               MockHttpServletResponse response = new MockHttpServletResponse();
+               PowerMockito.when(dmaapContext.getRequest()).thenReturn(request);
+               PowerMockito.when(dmaapContext.getResponse()).thenReturn(response);
+
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);
+               PowerMockito.mockStatic(PropertiesMapBean.class);
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout")).thenReturn("1000");
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty")).thenReturn("true");
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta")).thenReturn("true");
+               PowerMockito.when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testSubscribe_Blacklistip() {
+
+               try {
+                       PowerMockito.when(Blacklist.contains("127.0.0.1")).thenReturn(true);
+                       service.subscribe(dmaapContext, "testTopic", "CG1", "23");
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (UnavailableException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testSubscribe_NullTopic() {
+
+               try {
+                       PowerMockito.when(dmaapKafkaMetaBroker.getTopic(Matchers.anyString())).thenReturn(null);
+                       service.subscribe(dmaapContext, "testTopic", "CG1", "23");
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (UnavailableException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+       
+       @Test(expected = CambriaApiException.class)
+       public void testSubscribe_NullTopic_Error() throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+       CambriaApiException, IOException {
+
+                   PowerMockito.when(configReader.getfMetrics()).thenThrow(new ConcurrentModificationException("Error occurred"));
+                       PowerMockito.when(dmaapKafkaMetaBroker.getTopic(Matchers.anyString())).thenReturn(metatopic);
+                       service.subscribe(dmaapContext, "testTopic", "CG1", "23");
+       }
+
+       @Test
+       public void testPushEvents_wttransaction() {
+
+               String source = "source of my InputStream";
+
+               try {
+                       InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));
+                       service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap", iStream, "3", "12:00:00");
+
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (missingReqdSetting e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+       }
+       
+       @Test(expected = CambriaApiException.class)
+       public void testPushEvents_wttransaction_error() throws Exception {
+
+               String source = "source of my InputStream";
+
+               InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));
+               PowerMockito.mockStatic(AJSCPropertiesMap.class);
+               PowerMockito.mockStatic(PropertiesMapBean.class);
+               PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("-5");
+               PowerMockito.when(configReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred"));
+               service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap1", iStream, "3", "12:00:00");
+
+       }
+
+       @Test
+       public void testPushEvents() {
+
+               String source = "source of my InputStream";
+
+               try {
+                       InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));
+                       service.pushEvents(dmaapContext, "testTopic", iStream, "3", "12:00:00");
+
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (missingReqdSetting e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+
+       @Test
+       public void testPushEvents_blacklistip() {
+
+               String source = "source of my InputStream";
+
+               try {
+                       PowerMockito.when(Blacklist.contains("127.0.0.1")).thenReturn(true);
+                       InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8"));
+                       service.pushEvents(dmaapContext, "testTopic", iStream, "3", "12:00:00");
+
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (CambriaApiException e) {
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (TopicExistsException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (AccessDeniedException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (missingReqdSetting e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+       }
+
+       NsaApiDb<NsaSimpleApiKey> fApiKeyDb = new NsaApiDb<NsaSimpleApiKey>() {
+
+               Set<String> keys = new HashSet<>(Arrays.asList("testkey", "admin"));
+
+               @Override
+               public NsaSimpleApiKey createApiKey(String arg0, String arg1)
+                               throws KeyExistsException, ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return new NsaSimpleApiKey(arg0, arg1);
+               }
+
+               @Override
+               public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return false;
+               }
+
+               @Override
+               public boolean deleteApiKey(String arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return false;
+               }
+
+               @Override
+               public Map<String, NsaSimpleApiKey> loadAllKeyRecords() throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return null;
+               }
+
+               @Override
+               public Set<String> loadAllKeys() throws ConfigDbException {
+                       // TODO Auto-generated method stub
+
+                       return keys;
+               }
+
+               @Override
+               public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException {
+                       if (!keys.contains(arg0)) {
+                               return null;
+                       }
+                       return new NsaSimpleApiKey(arg0, "password");
+               }
+
+               @Override
+               public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+
+               }
+       };
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java
new file mode 100644 (file)
index 0000000..e946b66
--- /dev/null
@@ -0,0 +1,64 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import org.onap.dmaap.dmf.mr.backends.Publisher.message;
+import org.onap.dmaap.dmf.mr.beans.LogDetails;
+
+public class MessageTest implements message {
+
+       @Override
+       public String getKey() {
+               // TODO Auto-generated method stub
+               return "123";
+       }
+
+       @Override
+       public String getMessage() {
+               // TODO Auto-generated method stub
+               return "Msg";
+       }
+
+       @Override
+       public void setLogDetails(LogDetails logDetails) {
+               // TODO Auto-generated method stub
+               
+       }
+
+       @Override
+       public LogDetails getLogDetails() {
+               // TODO Auto-generated method stub
+               return null;
+       }
+
+       @Override
+       public boolean isTransactionEnabled() {
+               // TODO Auto-generated method stub
+               return false;
+       }
+
+       @Override
+       public void setTransactionEnabled(boolean transactionEnabled) {
+               // TODO Auto-generated method stub
+               
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java
new file mode 100644 (file)
index 0000000..51ee4d4
--- /dev/null
@@ -0,0 +1,95 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.service.impl.MetricsServiceImpl;
+
+public class MetricsServiceImplTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       @Test
+       public void testGet() {
+               
+               MetricsServiceImpl service = new MetricsServiceImpl();
+               try {
+                       service.get(new DMaaPContext());
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+       @Test
+       public void testGetMetricByName() {
+               
+               MetricsServiceImpl service = new MetricsServiceImpl();
+               try {
+                       service.getMetricByName(new DMaaPContext(), "uptime");
+               } catch (org.json.JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       //e.printStackTrace();
+                       assertTrue(true);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (CambriaApiException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java
new file mode 100644 (file)
index 0000000..36500d6
--- /dev/null
@@ -0,0 +1,93 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+//@RunWith(MockitoJUnitRunner.class)
+//@RunWith(PowerMockRunner.class)
+//@PrepareForTest(PropertiesMapBean.class)
+public class ShowConsumerCacheTest {
+/*
+@InjectMocks
+TopicServiceImpl topicService;
+
+@Mock
+private DMaaPErrorMessages errorMessages;
+
+@Mock
+DMaaPContext dmaapContext;
+
+@Mock
+ConfigurationReader configReader;
+
+
+@Mock
+JSONObject consumers;
+
+@Mock
+JSONObject consumerObject;
+
+@Mock
+JSONArray jsonConsumersList;
+
+@Mock
+DMaaPAuthenticator<NsaSimpleApiKey> dmaaPAuthenticator;
+
+@Mock
+NsaApiKey user;
+
+@Mock
+NsaSimpleApiKey nsaSimpleApiKey;
+
+@Mock
+HttpServletRequest httpServReq;
+
+
+@Before
+public void setUp(){
+MockitoAnnotations.initMocks(this);
+}
+
+
+//@Test(expected = DMaaPAccessDeniedException.class)
+@Test
+public void testShowConsmerCache()throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException, JSONException{
+Assert.assertNotNull(topicService);
+
+String myName = "Brian";
+Object created = null;
+Object accessed = null;
+Object log = null;
+Object info = null;
+
+when(consumerObject.put("name", myName)).thenReturn(consumerObject);
+when(consumerObject.put("created", created)).thenReturn(consumerObject);
+when(consumerObject.put("accessed", accessed)).thenReturn(consumerObject);
+when(consumerObject.put("accessed", Consumer.class)).thenReturn(consumerObject);
+when(jsonConsumersList.put(consumerObject)).thenReturn(null);
+
+when(consumers.put("consumers", jsonConsumersList)).thenReturn(consumerObject);
+
+
+
+}*/
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java
new file mode 100644 (file)
index 0000000..5e296b7
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.service.impl;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java
new file mode 100644 (file)
index 0000000..880fbe9
--- /dev/null
@@ -0,0 +1,95 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.configs.ConfigDbException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.service.impl.TransactionServiceImpl;
+import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+public class TransactionServiceImplTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+       
+       @Test
+       public void testCheckTransaction() {
+               
+               TransactionServiceImpl service = new TransactionServiceImpl();
+               service.checkTransaction(new TransactionObj("23", 1100, 1000, 10));             
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGetAllTransactionObjs() {
+               
+               TransactionServiceImpl service = new TransactionServiceImpl();
+               try {
+                       service.getAllTransactionObjs(new DMaaPContext());
+               } catch (ConfigDbException | IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       @Test
+       public void testGetTransactionObj() {
+               
+               TransactionServiceImpl service = new TransactionServiceImpl();
+               try {
+                       service.getTransactionObj(new DMaaPContext(), "23");
+               } catch (ConfigDbException | IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (JSONException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }               
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+       }
+       
+       
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java
new file mode 100644 (file)
index 0000000..6981153
--- /dev/null
@@ -0,0 +1,297 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.service.impl;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.dmf.mr.metabroker.Topic;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.service.impl.UIServiceImpl;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.IOException;
+import java.util.*;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.when;
+
+@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"})
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class })
+public class UIServiceImplTest {
+
+       @InjectMocks
+       UIServiceImpl service;
+
+       @Mock
+       DMaaPContext dmaapContext;
+       @Mock
+       ConsumerFactory factory;
+
+       @Mock
+       ConfigurationReader configReader;
+
+       @Mock
+       DMaaPKafkaMetaBroker dmaapKafkaMetaBroker;
+
+       @Mock
+       Topic metatopic;
+
+       @Before
+       public void setUp() throws Exception {
+               MockitoAnnotations.initMocks(this);
+               PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class);
+               NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password");
+
+               PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader);
+               PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory);
+
+               PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb);
+               PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user);
+               PowerMockito.mockStatic(DMaaPResponseBuilder.class);
+               PowerMockito.when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker);
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testHello() {
+
+               try {
+                       service.hello(dmaapContext);
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+
+       }
+
+       @Test
+       public void testGetApiKeysTable() {
+
+               try {
+                       service.getApiKeysTable(dmaapContext);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+
+       @Test
+       public void testGetApiKey() {
+
+               try {
+                       service.getApiKey(dmaapContext, "admin");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+
+       }
+
+       @Test
+       public void testGetApiKey_invalidkey() {
+
+               try {
+                       service.getApiKey(dmaapContext, "k56HmWT72J");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+                       assertTrue(true);
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+
+       }
+
+       @Test
+       public void testGetTopicsTable() {
+
+               try {
+                       List<Topic> topics = new ArrayList<Topic>();
+                       topics.add(metatopic);
+                       when(dmaapKafkaMetaBroker.getAllTopics()).thenReturn(topics);
+                       service.getTopicsTable(dmaapContext);
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+
+       }
+
+       @Test
+       public void testGetTopic() {
+
+               try {
+                       when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(metatopic);
+                       service.getTopic(dmaapContext, "testTopic");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+               assertTrue(true);
+       }
+
+       @Test
+       public void testGetTopic_nulltopic() {
+
+               try {
+                       when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null);
+                       service.getTopic(dmaapContext, "testTopic");
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       // e.printStackTrace();
+               } catch (ConfigDbException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (IOException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+
+       }
+
+       NsaApiDb<NsaSimpleApiKey> fApiKeyDb = new NsaApiDb<NsaSimpleApiKey>() {
+
+               Set<String> keys = new HashSet<>(Arrays.asList("testkey", "admin"));
+
+               @Override
+               public NsaSimpleApiKey createApiKey(String arg0, String arg1)
+                               throws KeyExistsException, ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return new NsaSimpleApiKey(arg0, arg1);
+               }
+
+               @Override
+               public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return false;
+               }
+
+               @Override
+               public boolean deleteApiKey(String arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+                       return false;
+               }
+
+               @Override
+               public Map<String, NsaSimpleApiKey> loadAllKeyRecords() throws ConfigDbException {
+                       Map<String, NsaSimpleApiKey> map = new HashMap<String, NsaSimpleApiKey>();
+                       map.put("testkey", new NsaSimpleApiKey("testkey", "password"));
+                       map.put("admin", new NsaSimpleApiKey("admin", "password"));
+
+                       return map;
+               }
+
+               @Override
+               public Set<String> loadAllKeys() throws ConfigDbException {
+                       // TODO Auto-generated method stub
+
+                       return keys;
+               }
+
+               @Override
+               public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException {
+                       if (!keys.contains(arg0)) {
+                               return null;
+                       }
+                       return new NsaSimpleApiKey(arg0, "password");
+               }
+
+               @Override
+               public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+                       // TODO Auto-generated method stub
+
+               }
+       };
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..1fad8f3
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.transaction;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({TransactionObjTest.class, TrnRequestTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java
new file mode 100644 (file)
index 0000000..7fc990e
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.transaction;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java
new file mode 100644 (file)
index 0000000..36cdf92
--- /dev/null
@@ -0,0 +1,175 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.transaction;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
+
+public class TransactionObjTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testAsJsonObject() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               
+               try {
+                       obj.asJsonObject();
+                       
+               } catch(NullPointerException e) {
+                       assertTrue(true);
+               }
+                
+       }
+       
+       @Test
+       public void testGetId() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getId();
+               assertTrue(true);
+
+       }
+       
+       @Test
+       public void testSetId() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setId("23");
+               assertTrue(true);
+        
+       }
+
+       @Test
+       public void testGetCreateTime() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getCreateTime();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetCreateTime() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setCreateTime("12:00:00");
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSerialize() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.serialize();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testGetTotalMessageCount() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getTotalMessageCount();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetTotalMessageCount() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setTotalMessageCount(200);
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testGetSuccessMessageCount() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getSuccessMessageCount();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetSuccessMessageCount() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setSuccessMessageCount(198);
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testGetFailureMessageCount() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getFailureMessageCount();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetFailureMessageCount() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setFailureMessageCount(2);
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testGetfData() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getfData();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetfData() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setfData(null);
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testGetTrnRequest() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.getTrnRequest();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetTrnRequest() {
+               TransactionObj obj = new TransactionObj("23", 100, 98, 2);
+               obj.setTrnRequest(null);
+               assertTrue(true);
+                
+       }
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java
new file mode 100644 (file)
index 0000000..f174a32
--- /dev/null
@@ -0,0 +1,185 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.transaction;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.transaction.TrnRequest;
+
+import static org.junit.Assert.assertTrue;
+
+public class TrnRequestTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetId() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getId();
+               assertTrue(true);
+                
+       }
+       
+       @Test
+       public void testSetId() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setId("23");
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetRequestCreate() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getRequestCreate();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetRequestCreate() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setRequestCreate("createRequest");
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetRequestHost() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getRequestHost();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetRequestHost() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setRequestHost("requestHost");
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetServerHost() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getServerHost();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetServerHost() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setServerHost("requestHost");
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetMessageProceed() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getMessageProceed();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetMessageProceed() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setMessageProceed("messageProceed");
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetTotalMessage() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getTotalMessage();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetTotalMessage() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setTotalMessage("200");
+               assertTrue(true);
+        
+       }
+       
+       
+       @Test
+       public void testGetClientType() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getClientType();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetClientType() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setClientType("admin");
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testGetUrl() {
+               TrnRequest req = new TrnRequest();
+               
+               req.getUrl();
+               assertTrue(true);
+        
+       }
+       
+       @Test
+       public void testSetUrl() {
+               TrnRequest req = new TrnRequest();
+               
+               req.setUrl("http://google.com");
+               assertTrue(true);
+        
+       }
+       
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java
new file mode 100644 (file)
index 0000000..5fc2351
--- /dev/null
@@ -0,0 +1,66 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.transaction.impl;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;
+
+import static org.junit.Assert.assertTrue;
+
+public class DMaaPSimpleTransactionFactoryTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testMakeNewTransactionObj() {
+               
+               DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory();
+               
+               factory.makeNewTransactionObj("{'transactionId': '123', 'totalMessageCount': '200', "
+                               + "'successMessageCount': '200', 'failureMessageCount': '0'}");
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+               
+        
+       }
+       
+       @Test
+       public void testMakeNewTransactionId() {
+               
+               DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory();
+               factory.makeNewTransactionId("123");
+               
+               String trueValue = "True";
+               assertTrue(trueValue.equalsIgnoreCase("True"));
+       
+       }
+       
+       
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..f173b57
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.transaction.impl;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DMaaPSimpleTransactionFactoryTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java
new file mode 100644 (file)
index 0000000..212f551
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.transaction.impl;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java
new file mode 100644 (file)
index 0000000..ae8d167
--- /dev/null
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.cambria.utils;
+
+import static org.junit.Assert.assertNotNull;
+
+import org.junit.After;
+import org.junit.Test;
+
+import org.onap.dmaap.mr.cambria.embed.EmbedConfigurationReader;
+import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
+
+public class ConfigurationReaderTest {
+
+       EmbedConfigurationReader embedConfigurationReader = new EmbedConfigurationReader();
+
+       @After
+       public void tearDown() throws Exception {
+               embedConfigurationReader.tearDown();
+       }
+
+       @Test
+       public void testConfigurationReader() throws Exception {
+
+               ConfigurationReader configurationReader = embedConfigurationReader.buildConfigurationReader();
+
+               assertNotNull(configurationReader);
+               assertNotNull(configurationReader.getfApiKeyDb());
+               assertNotNull(configurationReader.getfConfigDb());
+               assertNotNull(configurationReader.getfConsumerFactory());
+               assertNotNull(configurationReader.getfIpBlackList());
+               assertNotNull(configurationReader.getfMetaBroker());
+               assertNotNull(configurationReader.getfMetrics());
+               assertNotNull(configurationReader.getfPublisher());
+               assertNotNull(configurationReader.getfSecurityManager());
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java
new file mode 100644 (file)
index 0000000..fdcb434
--- /dev/null
@@ -0,0 +1,67 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.utils;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.util.Map;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
+import org.onap.dmaap.dmf.mr.utils.DMaaPCuratorFactory;
+import org.onap.dmaap.dmf.mr.utils.PropertyReader;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;
+
+public class DMaaPCuratorFactoryTest {
+
+       @Before
+       public void setUp() throws Exception {
+               ClassLoader classLoader = getClass().getClassLoader();          
+               AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()));
+               
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testgetCurator() throws loadException {
+               CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader());
+               assertNotNull(curatorFramework);
+               
+               Map<String, String> map = AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
+               map.remove(CambriaConstants.kSetting_ZkConfigDbServers);
+               map.remove(CambriaConstants.kSetting_ZkSessionTimeoutMs);
+               
+               
+               
+               curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader());
+               assertNotNull(curatorFramework);
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java
new file mode 100644 (file)
index 0000000..326e399
--- /dev/null
@@ -0,0 +1,178 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.utils;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
+import org.springframework.mock.web.MockHttpServletRequest;
+import org.springframework.mock.web.MockHttpServletResponse;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public class DMaaPResponseBuilderTest {
+       
+       DMaaPContext dMaapContext;
+       MockHttpServletRequest request;
+       MockHttpServletResponse response;
+
+       @Before
+       public void setUp() throws Exception {
+               
+               dMaapContext = new DMaaPContext();
+               request = new MockHttpServletRequest();
+               response = new MockHttpServletResponse();
+               dMaapContext.setRequest(request);
+               dMaapContext.setResponse(response);
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testsetNoCacheHeadings(){           
+               DMaaPResponseBuilder.setNoCacheHeadings(dMaapContext);          
+               assertEquals("no-cache", response.getHeader("Pragma"));
+       }
+       
+       @Test
+       public void testrespondOk() throws JSONException, IOException{
+               JSONObject jsonObject = new JSONObject();
+               jsonObject.put("Name", "Test");
+               
+               DMaaPResponseBuilder.respondOk(dMaapContext, jsonObject);
+               assertEquals("application/json", response.getContentType());
+               assertEquals(200, response.getStatus());
+               
+               request.setMethod("HEAD");
+               
+               DMaaPResponseBuilder.respondOk(dMaapContext, jsonObject);
+               assertEquals("application/json", response.getContentType());
+               assertEquals(200, response.getStatus());
+       }
+       
+       @Test
+       public void testrespondOkNoContent(){
+               DMaaPResponseBuilder.respondOkNoContent(dMaapContext);
+               assertEquals(204, response.getStatus());
+       }
+       
+       @Test
+       public void testrespondOkNoContentError(){
+               dMaapContext.setResponse(null);
+               DMaaPResponseBuilder.respondOkNoContent(dMaapContext);
+               assertNull(dMaapContext.getResponse());
+       }
+       
+       @Test
+       public void testrespondOkWithHtml(){
+               DMaaPResponseBuilder.respondOkWithHtml(dMaapContext, "<head></head>");
+               
+               assertEquals("text/html", response.getContentType());
+               DMaaPResponseBuilder.respondOkWithHtml(dMaapContext, "<head></head>");
+               assertEquals(200, response.getStatus());
+       }
+       
+       @Test
+       public void testrespondOkWithHtmlError(){
+               dMaapContext.setResponse(null);
+               DMaaPResponseBuilder.respondOkWithHtml(dMaapContext, "<head></head>");
+               assertNull(dMaapContext.getResponse());
+       }
+       
+       @Test
+       public void testrespondWithError(){
+               DMaaPResponseBuilder.respondWithError(dMaapContext, 500, "InternalServerError");
+               assertEquals(500, response.getStatus());
+       }
+       
+       @Test(expected=NullPointerException.class)
+       public void testInvalidrespondWithError(){
+               dMaapContext.setResponse(null);
+               DMaaPResponseBuilder.respondWithError(dMaapContext, 500, "InternalServerError");
+       }
+       
+       @Test
+       public void testrespondWithJsonError(){
+               JSONObject o = new JSONObject();
+               o.put("status", 500);
+               o.put("message", "InternalServerError");
+               DMaaPResponseBuilder.respondWithError(dMaapContext, 500, o);
+               assertEquals(500, response.getStatus());
+       }
+       
+       @Test
+       public void testInvalidrespondWithJsonError(){
+               JSONObject o = new JSONObject();
+               o.put("status", 500);
+               o.put("message", "InternalServerError");
+               dMaapContext.setResponse(null);
+               DMaaPResponseBuilder.respondWithError(dMaapContext, 500, o);
+               assertNull(dMaapContext.getResponse());
+       }
+       
+       @Test
+       public void testrespondWithErrorInJson(){
+               DMaaPResponseBuilder.respondWithErrorInJson(dMaapContext, 500, "InternalServerError");
+               
+               assertEquals("application/json", response.getContentType());
+               assertEquals(500, response.getStatus());
+       }
+       
+       @Test
+       public void testsendErrorAndBody(){
+               DMaaPResponseBuilder.sendErrorAndBody(dMaapContext, 500, "InternalServerError", "text/html");
+               
+               assertEquals("text/html", response.getContentType());
+               assertEquals(500, response.getStatus());
+               
+               request.setMethod("HEAD");
+               
+               DMaaPResponseBuilder.sendErrorAndBody(dMaapContext, 500, "InternalServerError", "text/html");
+               
+               assertEquals("text/html", response.getContentType());
+               assertEquals(500, response.getStatus());
+               
+       }
+       
+       @Test
+       public void testgetStreamForBinaryResponse() throws IOException{
+               DMaaPResponseBuilder.getStreamForBinaryResponse(dMaapContext);
+               
+               assertEquals("application/octet-stream", response.getContentType());
+               assertEquals(200, response.getStatus());
+       }
+       
+       @Test(expected=NullPointerException.class)
+       public void testgetStreamForBinaryResponseError() throws IOException{
+               dMaapContext.setResponse(null);
+               DMaaPResponseBuilder.getStreamForBinaryResponse(dMaapContext);
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/EMailerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/EMailerTest.java
new file mode 100644 (file)
index 0000000..45824a1
--- /dev/null
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.mr.cambria.utils;
+
+import org.junit.Test;
+import org.onap.dmaap.dmf.mr.utils.Emailer;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+public class EMailerTest {
+       
+       @Test
+       public void testEmailer(){
+               
+               Emailer emailer= new Emailer();
+               try {
+                       emailer.send("dummy@dummy.com", "subj", "body");
+               } catch (IOException e) {
+                       assertTrue(true);
+               }
+               
+       }
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java
new file mode 100644 (file)
index 0000000..8a4009b
--- /dev/null
@@ -0,0 +1,136 @@
+/*******************************************************************************
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+ package org.onap.dmaap.mr.cambria.utils;
+
+import static org.junit.Assert.*;
+
+import java.security.Principal;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+
+import org.apache.http.auth.BasicUserPrincipal;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.utils.Utils;
+
+public class UtilsTest {
+
+       private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetFormattedDate() {
+               Date now = new Date();
+               String dateStr = Utils.getFormattedDate(now);
+               SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
+               String expectedStr = sdf.format(now);
+               assertNotNull(dateStr);
+               assertTrue("Formatted date does not match - expected [" + expectedStr
+                               + "] received [" + dateStr + "]",
+                               dateStr.equalsIgnoreCase(expectedStr));
+       }
+       
+       @Test
+       public void testgetUserApiKey(){
+               MockHttpServletRequest request = new MockHttpServletRequest();
+               request.addHeader(Utils.CAMBRIA_AUTH_HEADER, "User:Password");
+               assertEquals("User", Utils.getUserApiKey(request));
+               
+               MockHttpServletRequest request2 = new MockHttpServletRequest();
+               Principal principal = new BasicUserPrincipal("User@Test");
+               request2.setUserPrincipal(principal);
+               request2.addHeader("Authorization", "test");
+               assertEquals("User", Utils.getUserApiKey(request2));
+               
+               MockHttpServletRequest request3 = new MockHttpServletRequest();
+               assertNull(Utils.getUserApiKey(request3));
+       }
+       
+       @Test
+       public void testgetFromattedBatchSequenceId(){
+               Long x = new Long(1234);
+               String str = Utils.getFromattedBatchSequenceId(x);
+               assertEquals("001234", str);            
+       }
+       
+       @Test
+       public void testmessageLengthInBytes(){
+               String str = "TestString";
+               long length = Utils.messageLengthInBytes(str);
+               assertEquals(10, length);
+               assertEquals(0, Utils.messageLengthInBytes(null));
+       }
+
+       @Test
+       public void testgetResponseTransactionId(){
+               String transactionId = "test123::sampleResponseMessage";
+               assertEquals("test123",Utils.getResponseTransactionId(transactionId));
+               assertNull(Utils.getResponseTransactionId(null));
+               assertNull(Utils.getResponseTransactionId(""));
+       }
+       
+       @Test
+       public void testgetSleepMsForRate(){
+               long x = Utils.getSleepMsForRate(1024.124);
+               assertEquals(1000, x);
+               assertEquals(0, Utils.getSleepMsForRate(-1));
+       }
+       
+       @Test
+       public void testgetRemoteAddress(){
+               DMaaPContext dMaapContext = new DMaaPContext();
+               MockHttpServletRequest request = new MockHttpServletRequest();
+               
+               dMaapContext.setRequest(request);
+               
+               assertEquals(request.getRemoteAddr(), Utils.getRemoteAddress(dMaapContext));
+               
+               request.addHeader("X-Forwarded-For", "XForward");
+               assertEquals("XForward", Utils.getRemoteAddress(dMaapContext));
+               
+               
+       }
+       
+       @Test
+       public void testGetKey(){
+               assertNotNull(Utils.getKafkaproperty());
+               
+       }
+       
+       @Test
+       public void testCadiEnable(){
+               assertFalse(Utils.isCadiEnabled());
+               
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java b/src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java
new file mode 100644 (file)
index 0000000..b1dacda
--- /dev/null
@@ -0,0 +1,86 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ContentLengthFilterTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testDestroy() {
+               ContentLengthFilter filter = new ContentLengthFilter();
+               
+               filter.destroy();
+               
+               assertTrue(true);
+       }
+       
+       
+       @Test
+       public void testFilter() {
+               ContentLengthFilter filter = new ContentLengthFilter();
+               
+               try {
+                       filter.doFilter(null, null, null);
+               } catch (IOException | ServletException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }
+               
+               
+       }
+       
+       @Test
+       public void testInit() {
+               ContentLengthFilter filter = new ContentLengthFilter();
+               
+               try {
+                       filter.init(null);
+               } catch (ServletException e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               } catch (NullPointerException e) {
+                       // TODO Auto-generated catch block
+                       assertTrue(true);
+               }               
+               
+       }
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java b/src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java
new file mode 100644 (file)
index 0000000..f634807
--- /dev/null
@@ -0,0 +1,62 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+
+public class DefaultLengthTest {
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testGetDefaultLength() {
+               DefaultLength length = new DefaultLength();
+               
+               length.getDefaultLength();
+               
+               assertTrue(true);
+       }
+       
+       @Test
+       public void testSetDefaultLength() {
+               DefaultLength length = new DefaultLength();
+               
+               length.setDefaultLength("23");
+               
+               assertTrue(true);
+       }
+
+
+
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..7aa0d28
--- /dev/null
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({ContentLengthFilterTest.class, DefaultLengthTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/filter/TestRunner.java b/src/test/java/org/onap/dmaap/mr/filter/TestRunner.java
new file mode 100644 (file)
index 0000000..d5b3972
--- /dev/null
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java
new file mode 100644 (file)
index 0000000..7ecc203
--- /dev/null
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+import java.io.Serializable;
+
+public class ApiKeyBean implements Serializable {
+
+       /*private static final long serialVersionUID = -8219849086890567740L;
+
+       // private static final String KEY_CHARS =
+       // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+
+       private String email;
+       private String description;
+
+       public ApiKeyBean() {
+               super();
+       }
+
+       public ApiKeyBean(String email, String description) {
+               super();
+               this.email = email;
+               this.description = description;
+       }
+
+       public String getEmail() {
+               return email;
+       }
+
+       public void setEmail(String email) {
+               this.email = email;
+       }
+
+       public String getDescription() {
+               return description;
+       }
+
+       public void setDescription(String description) {
+               this.description = description;
+       }
+
+       /*
+        * public String getKey() { return generateKey(16); }
+        * 
+        * public String getSharedSecret() { return generateKey(24); }
+        * 
+        * private static String generateKey ( int length ) { return
+        * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); }
+        */
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java
new file mode 100644 (file)
index 0000000..ca659f5
--- /dev/null
@@ -0,0 +1,121 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+public class DMaapPubSubTest {
+/*     private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class);
+       Client client = ClientBuilder.newClient();
+       String url = LoadPropertyFile.getPropertyFileData().getProperty("url");
+       WebTarget target = client.target(url);
+       String topicapikey;
+       String topicsecretKey;
+       String serverCalculatedSignature;
+       String date = LoadPropertyFile.getPropertyFileData().getProperty("date");
+       // changes by islam
+       String topic_name = LoadPropertyFile.getPropertyFileData().getProperty("topicName");
+       DmaapApiKeyTest keyInstance = new DmaapApiKeyTest();
+
+
+       public void testProduceMessage() {
+               LOGGER.info("test case publish message");
+               // DMaapTopicTest topicCreation = new DMaapTopicTest();
+               DmaapApiKeyTest keyInstance = new DmaapApiKeyTest();
+               // creating topic
+               createTopic(topic_name);
+
+               target = client.target(url);
+               target = target.path("/events/");
+               target = target.path(topic_name);
+               Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+                               .header("X-CambriaDate", date).post(Entity.json("{message:producing first message}"));
+               keyInstance.assertStatus(response2);
+               LOGGER.info("successfully published message");
+       }
+
+       public void testConsumeMessage() {
+               LOGGER.info("test case subscribing message");
+               createTopic(topic_name);
+               target = client.target(url);
+               target = target.path("/events/");
+               target = target.path(topic_name);
+               target = target.path("consumGrp");
+               target = target.path(topicapikey);
+               Response response = target.request().get();
+               keyInstance.assertStatus(response);
+               LOGGER.info("successfully consumed messages");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               String data = s.next();
+               s.close();
+               LOGGER.info("Consumed Message data: " + data);
+       }
+
+       public void createTopic(String name) {
+               if (!topicExist(name)) {
+                       TopicBean topicbean = new TopicBean();
+                       topicbean.setDescription("creating topic");
+                       topicbean.setPartitionCount(1);
+                       topicbean.setReplicationCount(1);
+                       topicbean.setTopicName(name);
+                       topicbean.setTransactionEnabled(true);
+                       target = client.target(url);
+                       target = target.path("/topics/create");
+                       JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "topic creation"));
+                       topicapikey = (String) jsonObj.get("key");
+                       topicsecretKey = (String) jsonObj.get("secret");
+                       serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+                       Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+                                       .header("X-CambriaDate", date).post(Entity.json(topicbean));
+                       keyInstance.assertStatus(response);
+               }
+       }
+
+       public boolean topicExist(String topicName) {
+               target = target.path("/topics/" + topicName);
+               InputStream is, issecret;
+               Response response = target.request().get();
+               if (response.getStatus() == HttpStatus.SC_OK) {
+                       is = (InputStream) response.getEntity();
+                       Scanner s = new Scanner(is);
+                       s.useDelimiter("\\A");
+                       JSONObject dataObj = new JSONObject(s.next());
+                       s.close();
+                       // get owner of a topic
+                       topicapikey = (String) dataObj.get("owner");
+                       target = client.target(url);
+                       target = target.path("/apiKeys/");
+                       target = target.path(topicapikey);
+                       Response response2 = target.request().get();
+                       issecret = (InputStream) response2.getEntity();
+                       Scanner st = new Scanner(issecret);
+                       st.useDelimiter("\\A");
+                       JSONObject dataObj1 = new JSONObject(st.next());
+                       st.close();
+                       // get secret key of this topic//
+                       topicsecretKey = (String) dataObj1.get("secret");
+                       serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+                       return true;
+               } else
+                       return false;
+       }*/
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java
new file mode 100644 (file)
index 0000000..23f9d5b
--- /dev/null
@@ -0,0 +1,147 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+public class DMaapTopicTest {
+       /*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class);
+       Client client = ClientBuilder.newClient();
+       String topicapikey, topicsecretKey, serverCalculatedSignature;
+       Properties prop = LoadPropertyFile.getPropertyFileData();
+       String topicName = prop.getProperty("topicName");
+       String url = prop.getProperty("url");
+       String date = prop.getProperty("date");
+       WebTarget target = client.target(url);
+       DmaapApiKeyTest keyInstance = new DmaapApiKeyTest();
+
+
+
+       public boolean topicExist(String topicName) {
+               target = target.path("/topics/" + topicName);
+               InputStream is, issecret;
+               Response response = target.request().get();
+               if (response.getStatus() == HttpStatus.SC_OK) {
+                       is = (InputStream) response.getEntity();
+                       Scanner s = new Scanner(is);
+                       s.useDelimiter("\\A");
+                       JSONObject dataObj = new JSONObject(s.next());
+                       s.close();
+                       // get owner of a topic
+                       topicapikey = (String) dataObj.get("owner");
+                       target = client.target(url);
+                       target = target.path("/apiKeys/");
+                       target = target.path(topicapikey);
+                       Response response2 = target.request().get();
+                       issecret = (InputStream) response2.getEntity();
+                       Scanner st = new Scanner(issecret);
+                       st.useDelimiter("\\A");
+                       JSONObject dataObj1 = new JSONObject(st.next());
+                       st.close();
+                       // get secret key of this topic//
+                       topicsecretKey = (String) dataObj1.get("secret");
+                       serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+                       return true;
+               } else
+                       return false;
+       }
+
+       public void testCreateTopic() {
+               LOGGER.info("test case create topic");
+               createTopic(topicName);
+               LOGGER.info("Returning after create topic");
+       }
+
+       public void testOneTopic() {
+               LOGGER.info("test case get specific topic name " + topicName);
+               createTopic(topicName);
+               target = client.target(url);
+               target = target.path("/topics/");
+               target = target.path(topicName);
+               Response response = target.request().get();
+               LOGGER.info("Successfully returned after fetching topic" + topicName);
+               keyInstance.assertStatus(response);
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               JSONObject dataObj = new JSONObject(s.next());
+               LOGGER.info("Details of " + topicName + " : " + dataObj.toString());
+               s.close();
+       }
+
+       public void testdeleteTopic() {
+               LOGGER.info("test case delete topic name " + topicName);
+               createTopic(topicName);
+               target = client.target(url);
+               target = target.path("/topics/");
+               target = target.path(topicName);
+               Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+                               .header("X-CambriaDate", date).delete();
+               keyInstance.assertStatus(response);
+               LOGGER.info("Successfully returned after deleting topic" + topicName);
+       }
+
+       public void testAllTopic() {
+               LOGGER.info("test case fetch all topic");
+               target = client.target(url);
+               target = target.path("/topics");
+               Response response = target.request().get();
+               keyInstance.assertStatus(response);
+               LOGGER.info("successfully returned after fetching all the topic");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               JSONObject dataObj = new JSONObject(s.next());
+               s.close();
+               LOGGER.info("List of all topics " + dataObj.toString());
+       }
+
+       public void testPublisherForTopic() {
+               LOGGER.info("test case get all publishers for topic: " + topicName);
+               // creating topic to check
+               createTopic(topicName);
+               target = client.target(url);
+               target = target.path("/topics/");
+               target = target.path(topicName);
+               target = target.path("/producers");
+               // checking all producer for a particular topic
+               Response response = target.request().get();
+               keyInstance.assertStatus(response);
+               LOGGER.info("Successfully returned after getting all the publishers" + topicName);
+       }
+
+
+       public void testConsumerForTopic() {
+               LOGGER.info("test case get all consumers for topic: " + topicName);
+               // creating topic to check
+               createTopic(topicName);
+               target = client.target(url);
+               target = target.path("/topics/");
+               target = target.path(topicName);
+               target = target.path("/consumers");
+               // checking all consumer for a particular topic
+               Response response = target.request().get();
+               keyInstance.assertStatus(response);
+               LOGGER.info("Successfully returned after getting all the consumers" + topicName);
+       }
+
+
+*/
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java
new file mode 100644 (file)
index 0000000..f8dc500
--- /dev/null
@@ -0,0 +1,60 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+
+
+public class DmaapAdminTest {
+       /*private static final Logger LOGGER = Logger.getLogger(DmaapAdminTest.class);
+       Client client = ClientBuilder.newClient();
+       WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url"));
+
+
+       public void assertStatus(Response response) {
+               assertTrue(response.getStatus() == HttpStatus.SC_OK);
+       }
+
+       // 1.get consumer cache
+       public void testConsumerCache() {
+               LOGGER.info("test case consumer cache");
+               target = target.path("/admin/consumerCache");
+               Response response = target.request().get();
+               assertStatus(response);
+               LOGGER.info("Successfully returned after fetching consumer cache");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               String data = s.next();
+               s.close();
+               LOGGER.info("Details of consumer cache :" + data);
+       }
+
+       // 2.drop consumer cache
+       public void testDropConsumerCache() {
+               LOGGER.info("test case drop consumer cache");
+               target = target.path("/admin/dropConsumerCache");
+               Response response = target.request().post(Entity.json(null));
+               assertStatus(response);
+               LOGGER.info("Successfully returned after dropping consumer cache");
+       }
+*/
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java
new file mode 100644 (file)
index 0000000..569f257
--- /dev/null
@@ -0,0 +1,73 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+public class DmaapApiKeyTest {
+       /*
+       private static final Logger LOGGER = Logger.getLogger(DmaapApiKeyTest.class);
+       Client client = ClientBuilder.newClient();
+       Properties prop = LoadPropertyFile.getPropertyFileData();
+       String url = prop.getProperty("url");
+       WebTarget target = client.target(url);
+       String date = prop.getProperty("date");
+
+
+       public JSONObject returnKey(ApiKeyBean apikeybean) {
+               LOGGER.info("Call to return newly created key");
+               target = client.target(url);
+               target = target.path("/apiKeys/create");
+               Response response = target.request().post(Entity.json(apikeybean));
+               assertStatus(response);
+               LOGGER.info("successfully created keys");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               JSONObject dataObj = new JSONObject(s.next());
+               s.close();
+               LOGGER.info("key details :" + dataObj.toString());
+               return dataObj;
+       }
+
+
+       public void assertStatus(Response response) {
+               assertTrue(response.getStatus() == HttpStatus.SC_OK);
+       }
+
+       // 2. get Allkey details
+       public void testAllKey() {
+               LOGGER.info("test case get all key");
+               target = target.path("/apiKeys");
+               Response response = target.request().get();
+               assertStatus(response);
+               LOGGER.info("successfully returned after get all key");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               LOGGER.info("Details of key: " + s.next());
+               s.close();
+
+       }
+
+
+
+*/
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java
new file mode 100644 (file)
index 0000000..d7e4e4e
--- /dev/null
@@ -0,0 +1,64 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+public class DmaapMetricsTest {
+       /*private static final Logger LOGGER = Logger.getLogger(DmaapMetricsTest.class);
+       Client client = ClientBuilder.newClient();
+       WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url"));
+
+       public void assertStatus(Response response) {
+               assertTrue(response.getStatus() == HttpStatus.SC_OK);
+       }
+
+
+       // 1.get metrics
+       public void testMetrics() {
+               LOGGER.info("test case get all metrics");
+               target = target.path("/metrics");
+               Response response = target.request().get();
+               assertStatus(response);
+               LOGGER.info("successfully returned after fetching all metrics");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               String data = s.next();
+               s.close();
+               LOGGER.info("DmaapMetricTest Test all metrics" + data);
+       }
+
+       // 2.get metrics by name
+       public void testMetricsByName() {
+               LOGGER.info("test case get metrics by name");
+               target = target.path("/metrics/startTime");
+               Response response = target.request().get();
+               assertStatus(response);
+               LOGGER.info("successfully returned after fetching specific metrics");
+               InputStream is = (InputStream) response.getEntity();
+               Scanner s = new Scanner(is);
+               s.useDelimiter("\\A");
+               String data = s.next();
+               s.close();
+               LOGGER.info("DmaapMetricTest metrics by name" + data);
+       }
+*/
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..14f0f5a
--- /dev/null
@@ -0,0 +1,41 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({ DMaapPubSubTest.class, DmaapApiKeyTest.class, DMaapTopicTest.class, DmaapMetricsTest.class,
+               DmaapAdminTest.class })
+public class JUnitTestSuite {
+       /*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class);
+
+
+       public static void main(String[] args) {
+               LOGGER.info("Running the test suite");
+               TestSuite tstSuite = new TestSuite();
+               LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+       }*/
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java
new file mode 100644 (file)
index 0000000..cdfe80d
--- /dev/null
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+public class LoadPropertyFile {
+       /*private static final Logger LOGGER = Logger
+                       .getLogger(LoadPropertyFile.class);
+
+       static public Properties getPropertyFileData() {
+               Properties prop = new Properties();
+               LOGGER.info("loading the property file");
+               
+               try {
+                       InputStream inputStream = LoadPropertyFile.class.getClassLoader()
+                                       .getResourceAsStream("DMaaPUrl.properties");
+                       prop.load(inputStream);
+                       LOGGER.info("successfully loaded the property file");
+               } catch (IOException e) {
+                       LOGGER.error("Error while retrieving API keys: " + e);
+               }
+               return prop;
+       }*/
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java
new file mode 100644 (file)
index 0000000..7b8247a
--- /dev/null
@@ -0,0 +1,37 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dmaap;
+
+public class TestRunner {
+       /*private static final Logger LOGGER = Logger.getLogger(TestRunner.class);
+
+
+       public static void main(String[] args) {
+               // TODO Auto-generated method stub
+               Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+               for (Failure failure : result.getFailures()) {
+                       LOGGER.info(failure.toString());
+               }
+               LOGGER.info(result.wasSuccessful());
+       }
+*/
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java
new file mode 100644 (file)
index 0000000..b1950e2
--- /dev/null
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+/**
+ * 
+ */
+package org.onap.dmaap.mr.test.dmaap;
+
+import java.io.Serializable;
+
+public class TopicBean implements Serializable {
+
+       /*
+        * private static final long serialVersionUID = -8620390377775457949L;
+        * private String topicName; private String description;
+        * 
+        * 
+        * private int partitionCount; private int replicationCount; private boolean
+        * transactionEnabled = false;
+        * 
+        * public boolean isTransactionEnabled() { return transactionEnabled; }
+        * 
+        * public void setTransactionEnabled(boolean transactionEnabled) {
+        * this.transactionEnabled = transactionEnabled; }
+        * 
+        * public TopicBean() { super(); }
+        * 
+        * public TopicBean(String topicName, String description, int
+        * partitionCount, int replicationCount, boolean transactionEnabled) {
+        * super(); this.topicName = topicName; this.description = description;
+        * this.partitionCount = partitionCount; this.replicationCount =
+        * replicationCount; this.transactionEnabled = transactionEnabled; }
+        * 
+        * public String getTopicName() { return topicName; }
+        * 
+        * public void setTopicName(String topicName) { this.topicName = topicName;
+        * }
+        * 
+        * public String getDescription() { return description; }
+        * 
+        * public void setDescription(String description) { this.description =
+        * description; }
+        * 
+        * public int getPartitionCount() { return partitionCount; }
+        * 
+        * public void setPartitionCount(int partitionCount) { this.partitionCount =
+        * partitionCount; }
+        * 
+        * public int getReplicationCount() { return replicationCount; }
+        * 
+        * public void setReplicationCount(int replicationCount) {
+        * this.replicationCount = replicationCount; }
+        */
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java b/src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java
new file mode 100644 (file)
index 0000000..96c9c78
--- /dev/null
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import java.io.Serializable;
+
+public class ApiKeyBean implements Serializable {
+
+       private static final long serialVersionUID = -8219849086890567740L;
+
+       // private static final String KEY_CHARS =
+       // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+       
+       private String email;
+       private String description;
+
+       public ApiKeyBean() {
+               super();
+       }
+
+       public ApiKeyBean(String email, String description) {
+               super();
+               this.email = email;
+               this.description = description;
+       }
+
+       public String getEmail() {
+               return email;
+       }
+
+       public void setEmail(String email) {
+               this.email = email;
+       }
+
+       public String getDescription() {
+               return description;
+       }
+
+       public void setDescription(String description) {
+               this.description = description;
+       }
+
+       /*
+        * public String getKey() { return generateKey(16); }
+        * 
+        * public String getSharedSecret() { return generateKey(24); }
+        * 
+        * private static String generateKey ( int length ) { return
+        * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); }
+        */
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java
new file mode 100644 (file)
index 0000000..91d897c
--- /dev/null
@@ -0,0 +1,141 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class DME2AdminTest extends TestCase {
+
+    private static final Logger LOGGER = LogManager.getLogger(DME2AdminTest.class);
+
+    protected String url;
+
+    protected Properties props;
+
+    protected HashMap<String, String> hm;
+
+    protected String methodType;
+
+    protected String contentType;
+
+    protected String user;
+
+    protected String password;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1");
+        System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false");
+        System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit");
+        this.props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        serviceName = "mr/admin";
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String protocol = props.getProperty("Protocol");
+
+        methodType = props.getProperty("MethodTypeGet");
+        contentType = props.getProperty("contenttype");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        this.url =
+            protocol + "://" + serviceName + "?" + "version=" + version + "&" + "envContext=" + env
+                + "&"
+                + "routeOffer=" + partner + "&partner=BOT_R";
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+    }
+
+    public void testGetConsumerCache() {
+        LOGGER.info("test case consumer cache started");
+        String subContextPath = props.getProperty("SubContextPathGetAdminConsumerCache");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodType);
+            sender.setSubContext(subContextPath);
+            sender.setPayload("");
+            sender.addHeader("Content-Type", contentType);
+
+            sender.addHeader("X-CambriaAuth", "user1:7J49YriFlyRgebyOsSJhZvY/C60=");
+            sender.addHeader("X-X-CambriaDate", "2016-10-18T09:56:04-05:00");
+
+            //sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+            LOGGER.info("Getting consumer Cache");
+            String reply = sender.sendAndWait(5000L);
+            System.out.println(reply);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            assertNotNull(reply);
+            LOGGER.info("response from consumer cache=" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void ttestDropConsumerCache() {
+        LOGGER.info("Drom consumer cache initiated");
+        String subContextPath = props.getProperty("SubContextPathDropAdminConsumerCache");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodType);
+            sender.setSubContext(subContextPath);
+            sender.setPayload("");
+            sender.addHeader("Content-Type", contentType);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+
+            LOGGER.info("Dropping consumer cache...........");
+            String reply = sender.sendAndWait(5000L);
+
+            // assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            assertNotNull(reply);
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java
new file mode 100644 (file)
index 0000000..087a7ab
--- /dev/null
@@ -0,0 +1,122 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.internal.jackson.map.ObjectMapper;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Properties;
+
+public class DME2ApiKeyTest extends TestCase {
+
+    private static final Logger LOGGER = LogManager.getLogger(DME2ApiKeyTest.class);
+
+    protected String url;
+
+    protected Properties props;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1");
+        System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false");
+        System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit");
+        this.props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String protocol = props.getProperty("Protocol");
+        this.url =
+            protocol + "://" + serviceName + "?" + "version=" + version + "&" + "envContext=" + env
+                + "&"
+                + "routeOffer=" + partner + "&partner=BOT_R";
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+    }
+
+    public void testCreateKey() {
+        LOGGER.info("Create Key test case initiated");
+        ApiKeyBean apiKeyBean = new ApiKeyBean("user1@onap.com", "Creating Api Key.m");
+        System.out.println(url);
+        returnKey(apiKeyBean, url, props);
+    }
+
+    public String returnKey(ApiKeyBean apibean, String url, Properties props) {
+        String reply = null;
+        try {
+            LOGGER.info("Call to return key ");
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(props.getProperty("MethodTypePost"));
+            sender.setSubContext(props.getProperty("SubContextPathGetCreateKeys"));
+            String jsonStringApiBean = new ObjectMapper().writeValueAsString(apibean);
+            sender.setPayload(jsonStringApiBean);
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+            LOGGER.info("creating ApiKey");
+            reply = sender.sendAndWait(5000L);
+            System.out.println("reply: " + reply);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return reply;
+    }
+
+    public void testGetAllKey() {
+        LOGGER.info("Test case Get All key initiated....");
+        try {
+            DME2Client sender = new DME2Client(new URI(this.url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(this.props.getProperty("MethodTypeGet"));
+            String subcontextPath = this.props.getProperty("SubContextPathGetApiKeys");
+            // sender.setSubContext(subcontextPath);
+            sender.setPayload("");
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+            LOGGER.info("Fetching all keys");
+            String reply = sender.sendAndWait(5000L);
+            System.out.println(reply);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java
new file mode 100644 (file)
index 0000000..5398cb4
--- /dev/null
@@ -0,0 +1,91 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URLEncoder;
+import java.util.HashMap;
+import java.util.Properties;
+
+public class DME2ConsumerFilterTest extends TestCase {
+
+    private static final Logger LOGGER = LogManager.getLogger(DME2ConsumerFilterTest.class);
+
+    public void testConsumerFilter() {
+        LOGGER.info("Test case consumer filter initiated");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String protocol = props.getProperty("Protocol");
+        String methodType = props.getProperty("MethodTypeGet");
+        String user = props.getProperty("user");
+        String password = props.getProperty("password");
+        String contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodType);
+            String subContextPathConsumer =
+                props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic")
+                    + "/" + props.getProperty("group") + "/" + props.getProperty("id") + "?"
+                    + props.getProperty("filterType");
+
+            sender.setSubContext(URLEncoder.encode(subContextPathConsumer, "UTF-8"));
+            sender.setPayload("");
+
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+            LOGGER.info("Consuming Message for Filter");
+            String reply = sender.sendAndWait(5000L);
+            assertNotNull(reply);
+            LOGGER.info("Message received = " + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java
new file mode 100644 (file)
index 0000000..2322007
--- /dev/null
@@ -0,0 +1,90 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+public class DME2ConsumerTest extends TestCase {
+
+    private static final Logger LOGGER = LogManager.getLogger(DME2ConsumerTest.class);
+
+    public void testConsumer() {
+        LOGGER.info("Test case subcribing initiated");
+
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String protocol = props.getProperty("Protocol");
+        String methodType = props.getProperty("MethodTypeGet");
+        String user = props.getProperty("user");
+        String password = props.getProperty("password");
+        String contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodType);
+            String subContextPathConsumer =
+                props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic")
+                    + "/" + props.getProperty("group") + "/" + props.getProperty("id");
+            sender.setSubContext(subContextPathConsumer);
+            sender.setPayload("");
+
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+
+            LOGGER.info("Consuming Message");
+            String reply = sender.sendAndWait(5000L);
+
+            assertNotNull(reply);
+            LOGGER.info("Message received = " + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java
new file mode 100644 (file)
index 0000000..1b92bda
--- /dev/null
@@ -0,0 +1,130 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+public class DME2MetricsTest extends TestCase {
+
+    private static final Logger LOGGER = LogManager.getLogger(DME2MetricsTest.class);
+
+    public void testGetMetrics() {
+        LOGGER.info("Test case get metrics initiated...");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String subContextPath = props.getProperty("SubContextPathGetMetrics");
+        String protocol = props.getProperty("Protocol");
+        String methodType = props.getProperty("MethodTypeGet");
+        String user = props.getProperty("user");
+        String password = props.getProperty("password");
+        String contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodType);
+            sender.setSubContext(subContextPath);
+            sender.setPayload("");
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+            LOGGER.info("Getting Metrics Details");
+            String reply = sender.sendAndWait(5000L);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void testGetMetricsByName() {
+        LOGGER.info("Test case get metrics by name initiated");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String subContextPath = props.getProperty("SubContextPathGetMetricsByName");
+        String protocol = props.getProperty("Protocol");
+        String methodType = props.getProperty("MethodTypeGet");
+        String user = props.getProperty("user");
+        String password = props.getProperty("password");
+        String contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodType);
+            sender.setSubContext(subContextPath);
+            sender.setPayload("");
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+            LOGGER.info("Getting Metrics By name");
+            String reply = sender.sendAndWait(5000L);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java
new file mode 100644 (file)
index 0000000..375ced0
--- /dev/null
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.internal.jackson.map.ObjectMapper;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+public class DME2ProducerTest extends TestCase {
+
+    private static final Logger LOGGER = LogManager.getLogger(DME2ProducerTest.class);
+
+    public void testProducer() {
+        DME2TopicTest topicTestObj = new DME2TopicTest();
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        String latitude = props.getProperty("Latitude");
+        String longitude = props.getProperty("Longitude");
+        String version = props.getProperty("Version");
+        String serviceName = props.getProperty("ServiceName");
+        String env = props.getProperty("Environment");
+        String partner = props.getProperty("Partner");
+        String protocol = props.getProperty("Protocol");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        // checking whether topic exist or not
+        if (!topicTestObj.topicExist(url, props, hm)) {
+            // if topic doesn't exist then create the topic
+            topicTestObj.createTopic(url, props, hm);
+            // after creating the topic publish on that topic
+            publishMessage(url, props, hm);
+        } else {
+            // if topic already exist start publishing on the topic
+            publishMessage(url, props, hm);
+        }
+
+    }
+
+    public void publishMessage(String url, Properties props, HashMap<String, String> mapData) {
+        try {
+            LOGGER.info("Call to publish message ");
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(props.getProperty("MethodTypePost"));
+            String subcontextpathPublish =
+                props.getProperty("SubContextPathproducer") + props.getProperty("newTopic");
+            sender.setSubContext(subcontextpathPublish);
+            String jsonStringApiBean = new ObjectMapper()
+                .writeValueAsString(new ApiKeyBean("example@att.com",
+                    "description"));
+            sender.setPayload(jsonStringApiBean);
+
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            LOGGER.info("Publishing message");
+            String reply = sender.sendAndWait(5000L);
+            // assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            assertNotNull(reply);
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java
new file mode 100644 (file)
index 0000000..ec03d26
--- /dev/null
@@ -0,0 +1,441 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.internal.jackson.map.ObjectMapper;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+import junit.framework.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class DME2TopicTest extends TestCase {
+
+    private String latitude;
+    private String longitude;
+    private String version;
+    private String serviceName;
+    private String env;
+    private String partner;
+    private String protocol;
+    private String methodTypeGet;
+    private String methodTypePost;
+    private String methodTypeDelete;
+    private String methodTypePut;
+
+    private String user;
+    private String password;
+    private String contenttype;
+    private String subContextPathGetAllTopic;
+    private String subContextPathGetOneTopic;
+    private String SubContextPathCreateTopic;
+    private String SubContextPathGetPublisherl;
+    private String SubContextPathGetPublisher;
+    private String SubContextPathGetPermitPublisher;
+    private String SubContextPathGetConsumer;
+    private String SubContextPathGetPermitConsumer;
+    private static final Logger LOGGER = LogManager.getLogger(DME2TopicTest.class);
+
+    public void createTopic(String url, Properties props, HashMap<String, String> mapData) {
+        LOGGER.info("create topic method starts");
+        if (!topicExist(url, props, mapData)) {
+            LOGGER.info("creating a new topic");
+            try {
+                DME2Client sender = new DME2Client(new URI(url), 5000L);
+                sender.setAllowAllHttpReturnCodes(true);
+                sender.setMethod(props.getProperty("MethodTypePost"));
+                sender.setSubContext(props.getProperty("SubContextPathCreateTopic"));
+                TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("newTopic"),
+                    props.getProperty("topicDescription"),
+                    Integer.parseInt(props.getProperty("partition")),
+                    Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props
+                    .getProperty("txenabled")));
+                String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean);
+                sender.setPayload(jsonStringApiBean);
+                sender.addHeader("content-type", props.getProperty("contenttype"));
+                sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+                LOGGER.info("creating Topic");
+                String reply = sender.sendAndWait(5000L);
+                assertTrue(LoadPropertyFile.isValidJsonString(reply));
+                LOGGER.info("response =" + reply);
+            } catch (DME2Exception e) {
+                e.printStackTrace();
+            } catch (URISyntaxException e) {
+                e.printStackTrace();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    public boolean topicExist(String url, Properties props, HashMap<String, String> mapData) {
+        boolean topicExist = false;
+        try {
+            LOGGER.info("Checking topic exists or not");
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(props.getProperty("MethodTypeGet"));
+            String subcontextPath =
+                props.getProperty("subContextPathGetOneTopic") + props.getProperty("newTopic");
+            sender.setSubContext(subcontextPath);
+            sender.setPayload("");
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+            String reply = sender.sendAndWait(5000L);
+            topicExist = LoadPropertyFile.isValidJsonString(reply);
+            LOGGER.info("Topic exist =" + topicExist);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return topicExist;
+    }
+
+    public void testAllTopics() {
+        LOGGER.info("Test case get all topics initiated");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        latitude = props.getProperty("Latitude");
+        longitude = props.getProperty("Longitude");
+        version = props.getProperty("Version");
+        serviceName = props.getProperty("ServiceName");
+        env = props.getProperty("Environment");
+        partner = props.getProperty("Partner");
+        subContextPathGetAllTopic = props.getProperty("subContextPathGetAllTopic");
+        protocol = props.getProperty("Protocol");
+        methodTypeGet = props.getProperty("MethodTypeGet");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude); // } else {
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodTypeGet);
+            sender.setSubContext(subContextPathGetAllTopic);
+            sender.setPayload("");
+
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+
+            LOGGER.info("Retrieving all topics");
+            String reply = sender.sendAndWait(5000L);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("All Topics details = " + reply);
+
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void testOneTopic() {
+        LOGGER.info("Test case get one topic initiated");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        latitude = props.getProperty("Latitude");
+        longitude = props.getProperty("Longitude");
+        version = props.getProperty("Version");
+        serviceName = props.getProperty("ServiceName");
+        env = props.getProperty("Environment");
+        partner = props.getProperty("Partner");
+        subContextPathGetOneTopic = props.getProperty("subContextPathGetOneTopic");
+        protocol = props.getProperty("Protocol");
+        methodTypeGet = props.getProperty("MethodTypeGet");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        System.out.println("Retrieving topic detail");
+        if (!topicExist(url, props, hm)) {
+            createTopic(url, props, hm);
+        } else {
+            assertTrue(true);
+        }
+    }
+
+    public void createTopicForDeletion(String url, Properties props,
+        HashMap<String, String> mapData) {
+        LOGGER.info("create topic method starts");
+        LOGGER.info("creating a new topic for deletion");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(props.getProperty("MethodTypePost"));
+            sender.setSubContext(props.getProperty("SubContextPathCreateTopic"));
+            TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("deleteTopic"),
+                props.getProperty("topicDescription"),
+                Integer.parseInt(props.getProperty("partition")),
+                Integer.parseInt(props.getProperty("replication")),
+                Boolean.valueOf(props.getProperty("txenabled")));
+            String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean);
+            sender.setPayload(jsonStringApiBean);
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+
+            LOGGER.info("creating Topic");
+            String reply = sender.sendAndWait(5000L);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public boolean topicExistForDeletion(String url, Properties props,
+        HashMap<String, String> mapData) {
+        boolean topicExist = false;
+        try {
+            LOGGER.info("Checking topic exists for deletion");
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(props.getProperty("MethodTypeGet"));
+            String subcontextPath =
+                props.getProperty("subContextPathGetOneTopic") + props.getProperty("deleteTopic");
+            sender.setSubContext(subcontextPath);
+            sender.setPayload("");
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+            String reply = sender.sendAndWait(5000L);
+            topicExist = LoadPropertyFile.isValidJsonString(reply);
+            LOGGER.info("Topic exist for deletion=" + topicExist);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return topicExist;
+    }
+
+    public void testDeleteTopic() {
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        latitude = props.getProperty("Latitude");
+        longitude = props.getProperty("Longitude");
+        version = props.getProperty("Version");
+        serviceName = props.getProperty("ServiceName");
+        env = props.getProperty("Environment");
+        partner = props.getProperty("Partner");
+        SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic");
+        protocol = props.getProperty("Protocol");
+        methodTypePost = props.getProperty("MethodTypeDelete");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        contenttype = props.getProperty("contenttypejson");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        System.out.println("deleteing topic");
+        if (!topicExistForDeletion(url, props, hm)) {
+            createTopicForDeletion(url, props, hm);
+            deleteTopic(url, props, hm);
+        } else {
+            deleteTopic(url, props, hm);
+        }
+    }
+
+    public void deleteTopic(String url, Properties props, HashMap<String, String> mapData) {
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(props.getProperty("MethodTypeDelete"));
+            String subsontextPathDelete = props.getProperty("subContextPathGetOneTopic")
+                + props.getProperty("deleteTopic");
+            sender.setSubContext(subsontextPathDelete);
+            sender.setPayload("");
+            sender.addHeader("content-type", props.getProperty("contenttype"));
+            sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+            System.out.println("Deleting Topic " + props.getProperty("deleteTopic"));
+            String reply = sender.sendAndWait(5000L);
+            assertNotNull(reply);
+            System.out.println("response =" + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void testGetProducersTopics() {
+        LOGGER.info("Test case get list of producers on topic");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        latitude = props.getProperty("Latitude");
+        longitude = props.getProperty("Longitude");
+        version = props.getProperty("Version");
+        serviceName = props.getProperty("ServiceName");
+        env = props.getProperty("Environment");
+        partner = props.getProperty("Partner");
+        SubContextPathGetPublisher = props.getProperty("SubContextPathGetPublisher");
+        protocol = props.getProperty("Protocol");
+        methodTypeGet = props.getProperty("MethodTypeGet");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodTypeGet);
+            sender.setSubContext(SubContextPathGetPublisher);
+            sender.setPayload("");
+
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+
+            LOGGER.info("Retrieving List of publishers");
+            String reply = sender.sendAndWait(5000L);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            LOGGER.info("All Publishers details = " + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void testGetConsumersTopics() {
+        LOGGER.info("Test case get list of consumers on topic ");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        latitude = props.getProperty("Latitude");
+        longitude = props.getProperty("Longitude");
+        version = props.getProperty("Version");
+        serviceName = props.getProperty("ServiceName");
+        env = props.getProperty("Environment");
+        partner = props.getProperty("Partner");
+        SubContextPathGetConsumer = props.getProperty("SubContextPathGetConsumer");
+        protocol = props.getProperty("Protocol");
+        methodTypeGet = props.getProperty("MethodTypeGet");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        contenttype = props.getProperty("contenttype");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        try {
+            DME2Client sender = new DME2Client(new URI(url), 5000L);
+            sender.setAllowAllHttpReturnCodes(true);
+            sender.setMethod(methodTypeGet);
+            sender.setSubContext(SubContextPathGetConsumer);
+            sender.setPayload("");
+
+            sender.addHeader("Content-Type", contenttype);
+            sender.setCredentials(user, password);
+            sender.setHeaders(hm);
+
+            LOGGER.info("Retrieving consumer details on topics");
+            String reply = sender.sendAndWait(5000L);
+            assertTrue(LoadPropertyFile.isValidJsonString(reply));
+            System.out.println("Reply from server = " + reply);
+        } catch (DME2Exception e) {
+            e.printStackTrace();
+        } catch (URISyntaxException e) {
+            e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void testCreateTopic() {
+        LOGGER.info("Test case create topic starts");
+        Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+        latitude = props.getProperty("Latitude");
+        longitude = props.getProperty("Longitude");
+        version = props.getProperty("Version");
+        serviceName = props.getProperty("ServiceName");
+        env = props.getProperty("Environment");
+        partner = props.getProperty("Partner");
+        SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic");
+        protocol = props.getProperty("Protocol");
+        methodTypePost = props.getProperty("MethodTypePost");
+        user = props.getProperty("user");
+        password = props.getProperty("password");
+        contenttype = props.getProperty("contenttypejson");
+        String url =
+            protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
+                + "/"
+                + "envContext=" + env + "/" + "partner=" + partner;
+        LoadPropertyFile.loadAFTProperties(latitude, longitude);
+        HashMap<String, String> hm = new HashMap<String, String>();
+        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+        createTopic(url, props, hm);
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java
new file mode 100644 (file)
index 0000000..bab1be3
--- /dev/null
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import junit.framework.TestSuite;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+
+@RunWith(Suite.class)
+@SuiteClasses({DME2AdminTest.class, DME2ApiKeyTest.class, DME2ConsumerTest.class,
+    DME2ConsumerTest.class, DME2MetricsTest.class, DME2ProducerTest.class, DME2TopicTest.class,})
+public class JUnitTestSuite {
+
+    private static final Logger LOGGER = LogManager.getLogger(JUnitTestSuite.class);
+
+    public static void main(String[] args) {
+        LOGGER.info("Running the test suite");
+
+        TestSuite tstSuite = new TestSuite();
+        LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java b/src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java
new file mode 100644 (file)
index 0000000..5c95f2f
--- /dev/null
@@ -0,0 +1,69 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+public class LoadPropertyFile {
+       //private static final Logger LOGGER = Logger.getLogger(LoadPropertyFile.class);
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(LoadPropertyFile.class);
+
+       static public Properties getPropertyFileDataProducer() {
+               Properties prop = new Properties();
+               LOGGER.info("loading the property file");
+               try {
+                       InputStream inputStream = LoadPropertyFile.class.getClassLoader()
+                                       .getResourceAsStream("dme2testcase.properties");
+                       
+                       prop.load(inputStream);
+                       LOGGER.info("successfully loaded the property file");
+               } catch (IOException e) {
+                       LOGGER.error("Error while retrieving API keys: " + e);
+               }
+               return prop;
+       }
+
+       static public void loadAFTProperties(String lat, String longi) {
+               System.setProperty("AFT_LATITUDE", lat);
+               System.setProperty("AFT_LONGITUDE", longi);
+               System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+               // printProperties();
+               System.out.println("Latitude =" + lat);
+               System.out.println("Longitude =" + longi);
+       }
+
+       static public boolean isValidJsonString(String chkString) {
+               boolean isJson = true;
+               try {
+                       new JSONObject(chkString);
+               } catch (Exception e) {
+                       isJson = false;
+               }
+               return isJson;
+       }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java b/src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java
new file mode 100644 (file)
index 0000000..5e6e1c3
--- /dev/null
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.mr.test.dme2;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+
+public class TestRunner {
+
+    private static final Logger LOGGER = LogManager.getLogger(TestRunner.class);
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+        Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+        for (Failure failure : result.getFailures()) {
+            LOGGER.info(failure.toString());
+        }
+        LOGGER.info(result.wasSuccessful());
+    }
+}
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java b/src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java
new file mode 100644 (file)
index 0000000..597546b
--- /dev/null
@@ -0,0 +1,94 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+/**
+ * 
+ */
+package org.onap.dmaap.mr.test.dme2;
+
+import java.io.Serializable;
+
+public class TopicBeanDME2 implements Serializable {
+
+       private static final long serialVersionUID = -8620390377775457949L;
+       private String topicName;
+       private String description;
+
+       
+       private int partitionCount;
+       private int replicationCount;
+       private boolean transactionEnabled = false;
+
+       public boolean isTransactionEnabled() {
+               return transactionEnabled;
+       }
+
+       public void setTransactionEnabled(boolean transactionEnabled) {
+               this.transactionEnabled = transactionEnabled;
+       }
+
+       public TopicBeanDME2() {
+               super();
+       }
+
+       public TopicBeanDME2(String topicName, String description, int partitionCount, int replicationCount,
+                       boolean transactionEnabled) {
+               super();
+               this.topicName = topicName;
+               this.description = description;
+               this.partitionCount = partitionCount;
+               this.replicationCount = replicationCount;
+               this.transactionEnabled = transactionEnabled;
+       }
+
+       public String getTopicName() {
+               return topicName;
+       }
+
+       public void setTopicName(String topicName) {
+               this.topicName = topicName;
+       }
+
+       public String getDescription() {
+               return description;
+       }
+
+       public void setDescription(String description) {
+               this.description = description;
+       }
+
+       public int getPartitionCount() {
+               return partitionCount;
+       }
+
+       public void setPartitionCount(int partitionCount) {
+               this.partitionCount = partitionCount;
+       }
+
+       public int getReplicationCount() {
+               return replicationCount;
+       }
+
+       public void setReplicationCount(int replicationCount) {
+               this.replicationCount = replicationCount;
+       }
+
+}
index 2f3098d..a3c123b 100644 (file)
 
  package org.onap.dmaap.service;
 
-import static org.junit.Assert.*;
-
-import static org.mockito.Matchers.anyString;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.when;
 
+import com.att.ajsc.beans.PropertiesMapBean;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
 import java.io.IOException;
-import java.util.ConcurrentModificationException;
-
 import javax.servlet.ServletOutputStream;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
-import org.junit.After;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-
 import org.junit.runner.RunWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import com.att.ajsc.beans.PropertiesMapBean;
 import org.onap.dmaap.dmf.mr.CambriaApiException;
 import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
 import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
@@ -65,16 +60,15 @@ import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
 import org.onap.dmaap.dmf.mr.service.TopicService;
 import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
 import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
 //@RunWith(MockitoJUnitRunner.class)
 @RunWith(PowerMockRunner.class)
 @PowerMockIgnore("jdk.internal.reflect.*")
-@PrepareForTest({ PropertiesMapBean.class })
+@PrepareForTest({ PropertiesMapBean.class, DMaaPResponseBuilder.class })
 public class TopicRestServiceTest {
 
        @InjectMocks
diff --git a/src/test/resources/DMaaPErrorMesaages.properties b/src/test/resources/DMaaPErrorMesaages.properties
new file mode 100644 (file)
index 0000000..a3d6ce7
--- /dev/null
@@ -0,0 +1,59 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+###############################################################################
+##
+## DMaaP Error Messages
+###############################################################################
+
+##
+# Generic WebApplication Exceptions
+##
+resource.not.found=The requested resource was not found.Please verify the URL and try again.
+server.unavailable=Server is temporarily unavailable or busy.Try again later, or try another server in the cluster.
+http.method.not.allowed=The specified HTTP method is not allowed for the requested resource.Enter a valid HTTP method and try again.
+incorrect.request.json=Incorrect JSON object. Please correct the JSON format and try again.
+network.time.out=Connection to the DMaaP MR was timed out.Please try again.
+
+##
+# AAF Errors
+##
+authentication.failure=Access Denied: Invalid Credentials. Enter a valid MechId and Password and try again.
+not.permitted.access.1=Access Denied.User does not have permission to perform
+not.permitted.access.2=operation on Topic:
+unable.to.authorize=Unable to authorize the user. Please try again later.
+
+
+##
+#Topic
+##
+get.topic.failure=Failed to retrieve list of all topics.
+get.topic.details.failure=Failed to retrieve details of topic:
+create.topic.failure=Failed to create topic:
+delete.topic.failure=Failed to delete topic:
+
+consume.msg.error=Error while reading data from topic.
+publish.msg.error=Error while publishing data to topic.
+msg_size_exceeds=Message size exceeds the default size.
+publish.msg.count=Successfully published number of messages :
+
+incorrect.json=Incorrect JSON object.Could not parse JSON. Please correct the JSON format and try again.
+topic.not.exist=No such topic exists.
\ No newline at end of file
diff --git a/src/test/resources/MsgRtrApi.properties b/src/test/resources/MsgRtrApi.properties
new file mode 100644 (file)
index 0000000..3aef922
--- /dev/null
@@ -0,0 +1,169 @@
+###############################################################################
+#  ============LICENSE_START=======================================================
+#  org.onap.dmaap
+#  ================================================================================
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  ================================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#        http://www.apache.org/licenses/LICENSE-2.0
+#  
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=========================================================
+#
+#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#  
+###############################################################################
+###############################################################################
+##
+## Cambria API Server config
+##
+##     - Default values are shown as commented settings.
+##
+
+###############################################################################
+##
+## HTTP service
+##
+##             - 3904 is standard as of 7/29/14.
+#
+## Zookeeper Connection
+##
+##     Both Cambria and Kafka make use of Zookeeper.
+##
+#config.zk.servers=172.18.1.1
+config.zk.servers=<zookeeper_host>
+#config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+##     Items below are passed through to Kafka's producer and consumer
+##     configurations (after removing "kafka.")
+##     if you want to change request.required.acks it can take this one value
+#kafka.metadata.broker.list=localhost:9092,localhost:9093
+kafka.metadata.broker.list=<kafka_host>:<kafka_port>
+##kafka.request.required.acks=-1
+#kafka.client.zookeeper=${config.zk.servers}
+consumer.timeout.ms=100
+zookeeper.connection.timeout.ms=6000
+zookeeper.session.timeout.ms=20000
+zookeeper.sync.time.ms=2000
+auto.commit.interval.ms=1000
+fetch.message.max.bytes =1000000
+auto.commit.enable=false
+
+#(backoff*retries > zksessiontimeout)
+kafka.rebalance.backoff.ms=10000
+kafka.rebalance.max.retries=6
+
+
+###############################################################################
+##
+##     Secured Config
+##
+##     Some data stored in the config system is sensitive -- API keys and secrets,
+##     for example. to protect it, we use an encryption layer for this section
+##     of the config.
+##
+## The key is a base64 encode AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+authentication.adminSecret=fe3cCompound
+#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+##     Kafka expects live connections from the consumer to the broker, which
+##     obviously doesn't work over connectionless HTTP requests. The Cambria
+##     server proxies HTTP requests into Kafka consumer sessions that are kept
+##     around for later re-use. Not doing so is costly for setup per request,
+##     which would substantially impact a high volume consumer's performance.
+##
+##     This complicates Cambria server failover, because we often need server
+##     A to close its connection before server B brings up the replacement.    
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+cambria.consumer.cache.touchFreqMs=120000
+##stickforallconsumerrequests=false
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+#cambria.rateLimit.maxEmptyPollsPerMinute=30
+#cambria.rateLimitActual.delay.ms=10
+
+###############################################################################
+##
+## Metrics Reporting
+##
+##     This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.topic=cambria.apinode.metrics                                  #msgrtr.apinode.metrics.dmaap 
+#metrics.send.cambria.sendEverySeconds=60
+
+cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
+consumer.timeout=17
+
+##############################################################################
+#100mb
+maxcontentlength=10000
+
+
+##############################################################################
+#AAF Properties
+msgRtr.namespace.aaf=org.onap.dmaap.mr.topic
+msgRtr.topicfactory.aaf=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
+enforced.topic.name.AAF=org.onap.dmaap.mr
+forceAAF=false
+transidUEBtopicreqd=false
+defaultNSforUEB=org.onap.dmaap.mr
+##############################################################################
+#Mirror Maker Agent
+msgRtr.mirrormakeradmin.aaf=org.onap.dmaap.mr.mirrormaker|*|admin
+msgRtr.mirrormakeruser.aaf=org.onap.dmaap.mr.mirrormaker|*|user
+msgRtr.mirrormakeruser.aaf.create=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
+msgRtr.mirrormaker.timeout=15000
+msgRtr.mirrormaker.topic=org.onap.dmaap.mr.mmagent
+msgRtr.mirrormaker.consumergroup=mmagentserver
+msgRtr.mirrormaker.consumerid=1
+
+kafka.max.poll.interval.ms=300000
+kafka.heartbeat.interval.ms=60000
+kafka.session.timeout.ms=240000
+kafka.max.poll.records=1000
\ No newline at end of file
diff --git a/src/test/resources/spring-context.xml b/src/test/resources/spring-context.xml
new file mode 100644 (file)
index 0000000..717dddf
--- /dev/null
@@ -0,0 +1,120 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+     ============LICENSE_START=======================================================
+     org.onap.dmaap
+     ================================================================================
+     Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+     ================================================================================
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+           http://www.apache.org/licenses/LICENSE-2.0
+     
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+     ============LICENSE_END=========================================================
+   
+     ECOMP is a trademark and service mark of AT&T Intellectual Property.
+     
+ -->
+
+<beans xmlns="http://www.springframework.org/schema/beans"
+       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mvc="http://www.springframework.org/schema/mvc"
+       xmlns:context="http://www.springframework.org/schema/context"
+       xsi:schemaLocation="
+        http://www.springframework.org/schema/mvc http://www.springframework.org/schema/mvc/spring-mvc-3.0.xsd
+        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
+        http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
+
+       <!-- Dependency Injection with annotations -->
+       <context:component-scan
+               base-package="com.att.nsa.cambria.utils,com.att.nsa.cambria.service.impl,com.att.nsa.cambria.exception" />
+
+       <context:property-placeholder
+               location="classpath:msgRtrApi.properties,classpath:DMaaPErrorMesaages.properties" />
+
+
+       <bean id="propertyReader" class="com.att.nsa.cambria.utils.PropertyReader" />
+       <bean
+               class="org.springframework.beans.factory.config.MethodInvokingFactoryBean">
+               <!-- Next value is the full qualified name of the static setter including 
+                       method name -->
+               <property name="staticMethod"
+                       value="com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory.populateKafkaInternalDefaultsMap" />
+               <property name="arguments">
+                       <list>
+                               <ref bean="propertyReader" />
+                       </list>
+               </property>
+       </bean>
+
+       <bean id="drumlinRequestRouter"
+               class="com.att.nsa.drumlin.service.framework.routing.DrumlinRequestRouter" />
+
+       <bean id="dMaaPMetricsSet" class="com.att.nsa.cambria.beans.DMaaPMetricsSet">
+               <constructor-arg ref="propertyReader" />
+       </bean>
+
+       <bean id="dMaaPZkClient" class=" com.att.nsa.cambria.beans.DMaaPZkClient">
+               <constructor-arg ref="propertyReader" />
+       </bean>
+
+       <bean id="dMaaPZkConfigDb" class=" com.att.nsa.cambria.beans.DMaaPZkConfigDb">
+               <constructor-arg ref="dMaaPZkClient" />
+               <constructor-arg ref="propertyReader" />
+       </bean>
+
+       <bean id="kafkaPublisher" class=" com.att.nsa.cambria.backends.kafka.KafkaPublisher">
+               <constructor-arg ref="propertyReader" />
+       </bean>
+
+       <bean id="dMaaPKafkaConsumerFactory" class=" com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory">
+               <constructor-arg ref="propertyReader" />
+               <constructor-arg ref="dMaaPMetricsSet" />
+               <constructor-arg ref="curator" />
+       </bean>
+
+       <bean id="curator" class="com.att.nsa.cambria.utils.DMaaPCuratorFactory"
+               factory-method="getCurator">
+               <constructor-arg ref="propertyReader" />
+       </bean>
+
+       <bean id="dMaaPKafkaMetaBroker" class=" com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker">
+               <constructor-arg ref="propertyReader" />
+               <constructor-arg ref="dMaaPZkClient" />
+               <constructor-arg ref="dMaaPZkConfigDb" />
+       </bean>
+
+       <!-- <bean id="q" class=" com.att.nsa.cambria.backends.memory.MemoryQueue" />
+
+       <bean id="mmb" class=" com.att.nsa.cambria.backends.memory.MemoryMetaBroker">
+               <constructor-arg ref="q" />
+               <constructor-arg ref="dMaaPZkConfigDb" />
+       <constructor-arg ref="propertyReader" />
+       </bean>
+
+       <bean id="dMaaPNsaApiDb" class="com.att.nsa.cambria.beans.DMaaPNsaApiDb"
+               factory-method="buildApiKeyDb">
+               <constructor-arg ref="propertyReader" />
+               <constructor-arg ref="dMaaPZkConfigDb" />
+       </bean>
+
+       <bean id="dMaaPTranDb" class="com.att.nsa.cambria.transaction.DMaaPTransactionDB" 
+               factory-method="buildTransactionDb"> <constructor-arg ref="propertyReader" 
+               /> <constructor-arg ref="dMaaPZkConfigDb" /> </bean>
+
+       <bean id="dMaaPAuthenticatorImpl" class="com.att.nsa.cambria.security.DMaaPAuthenticatorImpl">
+               <constructor-arg ref="dMaaPNsaApiDb" />
+       </bean>
+       <bean id="defLength" class="com.att.nsa.filter.DefaultLength">
+               <property name="defaultLength" value="${maxcontentlength}"></property>
+       </bean> -->
+
+       <!-- <bean class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer"> 
+               <property name="location"> <value>msgRtrApi.properties</value> </property> 
+               </bean> -->
+
+</beans>
\ No newline at end of file
index 768c33a..d8e24cd 100644 (file)
@@ -5,16 +5,16 @@
 # Copyright (C) 2017 AT&T Intellectual Property. All rights
 #                             reserved.
 # ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License"); 
-# you may not use this file except in compliance with the License. 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software 
-# distributed under the License is distributed on an "AS IS" BASIS, 
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and 
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
 # limitations under the License.
 # ============LICENSE_END============================================
 # ===================================================================
@@ -26,8 +26,8 @@
 # because they are used in Jenkins, whose plug-in doesn't support
 
 major=1
-minor=2
-patch=20
+minor=3
+patch=0
 
 base_version=${major}.${minor}.${patch}