-/*\r
- * ===============================LICENSE_START======================================\r
- * dcae-analytics\r
- * ================================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============================LICENSE_END===========================================\r
- */\r
-\r
-package org.openecomp.dcae.apod.analytics.cdap.plugins.batch.sink.dmaap;\r
-\r
-import co.cask.cdap.api.annotation.Description;\r
-import co.cask.cdap.api.annotation.Name;\r
-import co.cask.cdap.api.annotation.Plugin;\r
-import co.cask.cdap.api.data.batch.Output;\r
-import co.cask.cdap.api.data.format.StructuredRecord;\r
-import co.cask.cdap.api.data.schema.Schema;\r
-import co.cask.cdap.api.dataset.lib.KeyValue;\r
-import co.cask.cdap.etl.api.Emitter;\r
-import co.cask.cdap.etl.api.PipelineConfigurer;\r
-import co.cask.cdap.etl.api.batch.BatchSink;\r
-import co.cask.cdap.etl.api.batch.BatchSinkContext;\r
-import org.apache.hadoop.io.NullWritable;\r
-import org.openecomp.dcae.apod.analytics.cdap.common.utils.ValidationUtils;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.domain.config.dmaap.DMaaPMRSinkPluginConfig;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.utils.CDAPPluginUtils;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.validator.DMaaPMRSinkPluginConfigValidator;\r
-import org.slf4j.Logger;\r
-import org.slf4j.LoggerFactory;\r
-\r
-/**\r
- * @author Rajiv Singla . Creation Date: 1/26/2017.\r
- */\r
-@Plugin(type = BatchSink.PLUGIN_TYPE)\r
-@Name("DMaaPMRSink")\r
-@Description("A batch sink Plugin that publishes messages to DMaaP MR Topic.")\r
-public class DMaaPMRSink extends BatchSink<StructuredRecord, String, NullWritable> {\r
-\r
- private static final Logger LOG = LoggerFactory.getLogger(DMaaPMRSink.class);\r
-\r
- private final DMaaPMRSinkPluginConfig pluginConfig;\r
-\r
- public DMaaPMRSink(final DMaaPMRSinkPluginConfig pluginConfig) {\r
- LOG.debug("Creating DMaaP MR Sink Plugin with plugin Config: {}", pluginConfig);\r
- this.pluginConfig = pluginConfig;\r
- }\r
-\r
- @Override\r
- public void configurePipeline(final PipelineConfigurer pipelineConfigurer) {\r
- super.configurePipeline(pipelineConfigurer);\r
- ValidationUtils.validateSettings(pluginConfig, new DMaaPMRSinkPluginConfigValidator());\r
- // validates that input schema contains the field provided in Sink Message Column Name property\r
- final Schema inputSchema = pipelineConfigurer.getStageConfigurer().getInputSchema();\r
- CDAPPluginUtils.validateSchemaContainsFields(inputSchema, pluginConfig.getMessageColumnName());\r
- }\r
-\r
-\r
- @Override\r
- public void prepareRun(BatchSinkContext context) throws Exception {\r
- context.addOutput(Output.of(pluginConfig.getReferenceName(), new DMaaPMROutputFormatProvider(pluginConfig)));\r
- }\r
-\r
- @Override\r
- public void transform(StructuredRecord structuredRecord,\r
- Emitter<KeyValue<String, NullWritable>> emitter) throws Exception {\r
- // get incoming message from structured record\r
- final String incomingMessage = structuredRecord.get(pluginConfig.getMessageColumnName());\r
-\r
- // if incoming messages does not have message column name log warning as it should not happen\r
- if (incomingMessage == null) {\r
- LOG.warn("Column Name: {}, contains no message.Skipped for DMaaP MR Publishing....",\r
- pluginConfig.getMessageColumnName());\r
- } else {\r
-\r
- // emit the messages as key\r
- emitter.emit(new KeyValue<String, NullWritable>(incomingMessage, null));\r
- }\r
- }\r
-}\r
+/*
+ * ===============================LICENSE_START======================================
+ * dcae-analytics
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============================LICENSE_END===========================================
+ */
+
+package org.onap.dcae.apod.analytics.cdap.plugins.batch.sink.dmaap;
+
+import co.cask.cdap.api.annotation.Description;
+import co.cask.cdap.api.annotation.Name;
+import co.cask.cdap.api.annotation.Plugin;
+import co.cask.cdap.api.data.batch.Output;
+import co.cask.cdap.api.data.format.StructuredRecord;
+import co.cask.cdap.api.data.schema.Schema;
+import co.cask.cdap.api.dataset.lib.KeyValue;
+import co.cask.cdap.etl.api.Emitter;
+import co.cask.cdap.etl.api.PipelineConfigurer;
+import co.cask.cdap.etl.api.batch.BatchSink;
+import co.cask.cdap.etl.api.batch.BatchSinkContext;
+import org.apache.hadoop.io.NullWritable;
+import org.onap.dcae.apod.analytics.cdap.common.utils.ValidationUtils;
+import org.onap.dcae.apod.analytics.cdap.plugins.domain.config.dmaap.DMaaPMRSinkPluginConfig;
+import org.onap.dcae.apod.analytics.cdap.plugins.utils.CDAPPluginUtils;
+import org.onap.dcae.apod.analytics.cdap.plugins.validator.DMaaPMRSinkPluginConfigValidator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A CDAP batch sink plugin that publishes incoming structured records as
+ * messages to a DMaaP MR (Message Router) topic. The message text is read
+ * from a configurable column of the input record and emitted as the output
+ * key; the output value is unused ({@link NullWritable}).
+ *
+ * @author Rajiv Singla . Creation Date: 1/26/2017.
+ */
+@Plugin(type = BatchSink.PLUGIN_TYPE)
+@Name("DMaaPMRSink")
+@Description("A batch sink Plugin that publishes messages to DMaaP MR Topic.")
+public class DMaaPMRSink extends BatchSink<StructuredRecord, String, NullWritable> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(DMaaPMRSink.class);
+
+    // Sink configuration (includes the message column name and reference name); injected by CDAP
+    private final DMaaPMRSinkPluginConfig pluginConfig;
+
+    /**
+     * Creates the sink with the given plugin configuration.
+     *
+     * @param pluginConfig DMaaP MR sink plugin configuration supplied by the CDAP framework
+     */
+    public DMaaPMRSink(final DMaaPMRSinkPluginConfig pluginConfig) {
+        LOG.debug("Creating DMaaP MR Sink Plugin with plugin Config: {}", pluginConfig);
+        this.pluginConfig = pluginConfig;
+    }
+
+    /**
+     * Validates the plugin configuration at pipeline deployment time and verifies that the
+     * stage's input schema contains the configured message column, so that misconfiguration
+     * fails fast rather than at runtime.
+     *
+     * @param pipelineConfigurer configurer used to access the stage's input schema
+     */
+    @Override
+    public void configurePipeline(final PipelineConfigurer pipelineConfigurer) {
+        super.configurePipeline(pipelineConfigurer);
+        ValidationUtils.validateSettings(pluginConfig, new DMaaPMRSinkPluginConfigValidator());
+        // validates that input schema contains the field provided in Sink Message Column Name property
+        final Schema inputSchema = pipelineConfigurer.getStageConfigurer().getInputSchema();
+        CDAPPluginUtils.validateSchemaContainsFields(inputSchema, pluginConfig.getMessageColumnName());
+    }
+
+
+    /**
+     * Registers the DMaaP MR output format provider (keyed by the plugin's reference name)
+     * as this batch job's output.
+     *
+     * @param context batch sink context supplied by CDAP
+     * @throws Exception if output registration fails
+     */
+    @Override
+    public void prepareRun(BatchSinkContext context) throws Exception {
+        context.addOutput(Output.of(pluginConfig.getReferenceName(), new DMaaPMROutputFormatProvider(pluginConfig)));
+    }
+
+    /**
+     * Extracts the message from the configured column of the incoming record and emits it
+     * as the output key. Records whose message column is null are skipped with a warning
+     * rather than failing the job.
+     *
+     * @param structuredRecord incoming record containing the message to publish
+     * @param emitter          emitter producing (message, NullWritable) key-value pairs
+     * @throws Exception if emitting the record fails
+     */
+    @Override
+    public void transform(StructuredRecord structuredRecord,
+                          Emitter<KeyValue<String, NullWritable>> emitter) throws Exception {
+        // get incoming message from structured record
+        final String incomingMessage = structuredRecord.get(pluginConfig.getMessageColumnName());
+
+        // a null message column should not normally occur — configurePipeline validated the
+        // schema — but the value itself may still be null at runtime, so skip with a warning
+        if (incomingMessage == null) {
+            LOG.warn("Column Name: {}, contains no message.Skipped for DMaaP MR Publishing....",
+                    pluginConfig.getMessageColumnName());
+        } else {
+
+            // emit the message as the key; the value is intentionally null (NullWritable slot)
+            emitter.emit(new KeyValue<String, NullWritable>(incomingMessage, null));
+        }
+    }
+}