-/*\r
- * ===============================LICENSE_START======================================\r
- * dcae-analytics\r
- * ================================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============================LICENSE_END===========================================\r
- */\r
-\r
-package org.openecomp.dcae.apod.analytics.cdap.plugins.sparkcompute.tca;\r
-\r
-import co.cask.cdap.api.annotation.Description;\r
-import co.cask.cdap.api.annotation.Name;\r
-import co.cask.cdap.api.annotation.Plugin;\r
-import co.cask.cdap.api.data.format.StructuredRecord;\r
-import co.cask.cdap.api.data.format.StructuredRecord.Builder;\r
-import co.cask.cdap.api.data.schema.Schema;\r
-import co.cask.cdap.etl.api.PipelineConfigurer;\r
-import co.cask.cdap.etl.api.StageMetrics;\r
-import co.cask.cdap.etl.api.batch.SparkCompute;\r
-import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;\r
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;\r
-import org.apache.spark.api.java.JavaRDD;\r
-import org.apache.spark.api.java.function.Function;\r
-import org.openecomp.dcae.apod.analytics.cdap.common.CDAPMetricsConstants;\r
-import org.openecomp.dcae.apod.analytics.cdap.common.persistance.tca.TCACalculatorMessageType;\r
-import org.openecomp.dcae.apod.analytics.cdap.common.utils.ValidationUtils;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.domain.config.tca.SimpleTCAPluginConfig;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.utils.CDAPPluginUtils;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.validator.SimpleTCAPluginConfigValidator;\r
-import org.openecomp.dcae.apod.analytics.model.domain.policy.tca.MetricsPerEventName;\r
-import org.openecomp.dcae.apod.analytics.model.domain.policy.tca.TCAPolicy;\r
-import org.openecomp.dcae.apod.analytics.model.domain.policy.tca.Threshold;\r
-import org.openecomp.dcae.apod.analytics.tca.processor.TCACEFJsonProcessor;\r
-import org.openecomp.dcae.apod.analytics.tca.processor.TCACEFProcessorContext;\r
-import org.openecomp.dcae.apod.analytics.tca.utils.TCAUtils;\r
-import org.slf4j.Logger;\r
-import org.slf4j.LoggerFactory;\r
-\r
-/**\r
- * @author Rajiv Singla . Creation Date: 2/13/2017.\r
- */\r
-\r
-@Plugin(type = SparkCompute.PLUGIN_TYPE)\r
-@Name("SimpleTCAPlugin")\r
-@Description("Used to create TCA (Threshold Crossing Alert) based on given Policy")\r
-@SuppressFBWarnings("SE_INNER_CLASS")\r
-public class SimpleTCAPlugin extends SparkCompute<StructuredRecord, StructuredRecord> {\r
-\r
- private static final Logger LOG = LoggerFactory.getLogger(SimpleTCAPlugin.class);\r
- private static final long serialVersionUID = 1L;\r
-\r
- private final SimpleTCAPluginConfig pluginConfig;\r
-\r
- /**\r
- * Create an instance of Simple TCA Plugin with give Simple TCA Plugin Config\r
- *\r
- * @param pluginConfig Simple TCA Plugin Config\r
- */\r
- public SimpleTCAPlugin(SimpleTCAPluginConfig pluginConfig) {\r
- this.pluginConfig = pluginConfig;\r
- LOG.info("Creating instance of Simple TCA Plugin with plugin config: {}", pluginConfig);\r
- }\r
-\r
- @Override\r
- public void configurePipeline(PipelineConfigurer pipelineConfigurer) {\r
- super.configurePipeline(pipelineConfigurer);\r
- ValidationUtils.validateSettings(pluginConfig, new SimpleTCAPluginConfigValidator());\r
- final Schema inputSchema = pipelineConfigurer.getStageConfigurer().getInputSchema();\r
- CDAPPluginUtils.validateSchemaContainsFields(inputSchema, pluginConfig.getVesMessageFieldName());\r
- CDAPPluginUtils.setOutputSchema(pipelineConfigurer, pluginConfig.getSchema());\r
- }\r
-\r
- @Override\r
- public JavaRDD<StructuredRecord> transform(final SparkExecutionPluginContext context,\r
- final JavaRDD<StructuredRecord> input) throws Exception {\r
- final StageMetrics metrics = context.getMetrics();\r
-\r
- LOG.debug("Invoking Spark Transform for Simple TCA Plugin");\r
- return input.map(new Function<StructuredRecord, StructuredRecord>() {\r
-\r
- @Override\r
- public StructuredRecord call(StructuredRecord inputStructuredRecord) throws Exception {\r
- TCACalculatorMessageType calculatorMessageType;\r
- String alertMessage = null;\r
-\r
- // Get input structured record\r
- final String cefMessage = inputStructuredRecord.get(pluginConfig.getVesMessageFieldName());\r
-\r
- // Get TCA Policy\r
- final TCAPolicy tcaPolicy = CDAPPluginUtils.readValue(pluginConfig.getPolicyJson(), TCAPolicy.class);\r
-\r
- // create initial processor context\r
- final TCACEFProcessorContext initialProcessorContext =\r
- new TCACEFProcessorContext(cefMessage, tcaPolicy);\r
-\r
- final TCACEFJsonProcessor jsonProcessor = new TCACEFJsonProcessor();\r
- final TCACEFProcessorContext jsonProcessorContext =\r
- jsonProcessor.processMessage(initialProcessorContext);\r
-\r
- if (jsonProcessorContext.getCEFEventListener() != null) {\r
-\r
- LOG.debug("Json to CEF parsing successful. Parsed object {}",\r
- jsonProcessorContext.getCEFEventListener());\r
-\r
- // compute violations\r
- final TCACEFProcessorContext processorContextWithViolations =\r
- TCAUtils.computeThresholdViolations(jsonProcessorContext);\r
-\r
- // if violation are found then create alert message\r
- if (processorContextWithViolations.canProcessingContinue()) {\r
-\r
- alertMessage = TCAUtils.createTCAAlertString(processorContextWithViolations,\r
- pluginConfig.getReferenceName(), pluginConfig.getEnableAlertCEFFormat());\r
- calculatorMessageType = TCACalculatorMessageType.NON_COMPLIANT;\r
-\r
- LOG.debug("VES Threshold Violation Detected.An alert message is be generated: {}",\r
- alertMessage);\r
-\r
- final MetricsPerEventName metricsPerEventName =\r
- processorContextWithViolations.getMetricsPerEventName();\r
- if (metricsPerEventName != null\r
- && metricsPerEventName.getThresholds() != null\r
- && metricsPerEventName.getThresholds().get(0) != null) {\r
- final Threshold violatedThreshold = metricsPerEventName.getThresholds().get(0);\r
- LOG.debug("CEF Message: {}, Violated Threshold: {}", cefMessage, violatedThreshold);\r
- }\r
-\r
- metrics.count(CDAPMetricsConstants.TCA_VES_NON_COMPLIANT_MESSAGES_METRIC, 1);\r
-\r
- } else {\r
- LOG.debug("No Threshold Violation Detected. No alert will be generated.");\r
- calculatorMessageType = TCACalculatorMessageType.COMPLIANT;\r
- metrics.count(CDAPMetricsConstants.TCA_VES_COMPLIANT_MESSAGES_METRIC, 1);\r
- }\r
-\r
- } else {\r
- LOG.info("Unable to parse provided json message to CEF format. Invalid message: {}", cefMessage);\r
- calculatorMessageType = TCACalculatorMessageType.INAPPLICABLE;\r
- }\r
-\r
- LOG.debug("Calculator message type: {} for message: {}", calculatorMessageType, cefMessage);\r
-\r
- final Schema outputSchema = Schema.parseJson(pluginConfig.getSchema());\r
-\r
- // create new output record builder and copy any input record values to output record builder\r
- final Builder outputRecordBuilder =\r
- CDAPPluginUtils.createOutputStructuredRecordBuilder(outputSchema, inputStructuredRecord);\r
-\r
- // add alert field\r
- final Builder outputRecordBuilderWithAlertField =\r
- CDAPPluginUtils.addFieldValueToStructuredRecordBuilder(outputRecordBuilder,\r
- outputSchema, pluginConfig.getAlertFieldName(), alertMessage);\r
-\r
- // add message field type\r
- final Builder outRecordBuilderWithMessageTypeField =\r
- CDAPPluginUtils.addFieldValueToStructuredRecordBuilder(outputRecordBuilderWithAlertField,\r
- outputSchema, pluginConfig.getMessageTypeFieldName(), calculatorMessageType.toString());\r
-\r
- return outRecordBuilderWithMessageTypeField.build();\r
- }\r
- });\r
- }\r
-}\r
+/*
+ * ===============================LICENSE_START======================================
+ * dcae-analytics
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============================LICENSE_END===========================================
+ */
+
+package org.onap.dcae.apod.analytics.cdap.plugins.sparkcompute.tca;
+
+import co.cask.cdap.api.annotation.Description;
+import co.cask.cdap.api.annotation.Name;
+import co.cask.cdap.api.annotation.Plugin;
+import co.cask.cdap.api.data.format.StructuredRecord;
+import co.cask.cdap.api.data.format.StructuredRecord.Builder;
+import co.cask.cdap.api.data.schema.Schema;
+import co.cask.cdap.etl.api.PipelineConfigurer;
+import co.cask.cdap.etl.api.StageMetrics;
+import co.cask.cdap.etl.api.batch.SparkCompute;
+import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.Function;
+import org.onap.dcae.apod.analytics.cdap.common.CDAPMetricsConstants;
+import org.onap.dcae.apod.analytics.cdap.common.persistance.tca.TCACalculatorMessageType;
+import org.onap.dcae.apod.analytics.cdap.common.utils.ValidationUtils;
+import org.onap.dcae.apod.analytics.cdap.plugins.domain.config.tca.SimpleTCAPluginConfig;
+import org.onap.dcae.apod.analytics.cdap.plugins.utils.CDAPPluginUtils;
+import org.onap.dcae.apod.analytics.cdap.plugins.validator.SimpleTCAPluginConfigValidator;
+import org.onap.dcae.apod.analytics.model.domain.policy.tca.MetricsPerEventName;
+import org.onap.dcae.apod.analytics.model.domain.policy.tca.TCAPolicy;
+import org.onap.dcae.apod.analytics.model.domain.policy.tca.Threshold;
+import org.onap.dcae.apod.analytics.tca.processor.TCACEFJsonProcessor;
+import org.onap.dcae.apod.analytics.tca.processor.TCACEFProcessorContext;
+import org.onap.dcae.apod.analytics.tca.utils.TCAUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Rajiv Singla . Creation Date: 2/13/2017.
+ */
+
+@Plugin(type = SparkCompute.PLUGIN_TYPE)
+@Name("SimpleTCAPlugin")
+@Description("Used to create TCA (Threshold Crossing Alert) based on given Policy")
+@SuppressFBWarnings("SE_INNER_CLASS")
+public class SimpleTCAPlugin extends SparkCompute<StructuredRecord, StructuredRecord> {
+
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleTCAPlugin.class);
+ private static final long serialVersionUID = 1L;
+
+ private final SimpleTCAPluginConfig pluginConfig;
+
+ /**
+ * Create an instance of Simple TCA Plugin with the given Simple TCA Plugin Config
+ *
+ * @param pluginConfig Simple TCA Plugin Config
+ */
+ public SimpleTCAPlugin(SimpleTCAPluginConfig pluginConfig) {
+ this.pluginConfig = pluginConfig;
+ LOG.info("Creating instance of Simple TCA Plugin with plugin config: {}", pluginConfig);
+ }
+
+ @Override
+ public void configurePipeline(PipelineConfigurer pipelineConfigurer) {
+ super.configurePipeline(pipelineConfigurer);
+ ValidationUtils.validateSettings(pluginConfig, new SimpleTCAPluginConfigValidator());
+ final Schema inputSchema = pipelineConfigurer.getStageConfigurer().getInputSchema();
+ CDAPPluginUtils.validateSchemaContainsFields(inputSchema, pluginConfig.getVesMessageFieldName());
+ CDAPPluginUtils.setOutputSchema(pipelineConfigurer, pluginConfig.getSchema());
+ }
+
+ @Override
+ public JavaRDD<StructuredRecord> transform(final SparkExecutionPluginContext context,
+ final JavaRDD<StructuredRecord> input) throws Exception {
+ final StageMetrics metrics = context.getMetrics();
+
+ LOG.debug("Invoking Spark Transform for Simple TCA Plugin");
+ return input.map(new Function<StructuredRecord, StructuredRecord>() {
+
+ @Override
+ public StructuredRecord call(StructuredRecord inputStructuredRecord) throws Exception {
+ TCACalculatorMessageType calculatorMessageType;
+ String alertMessage = null;
+
+ // Get input structured record
+ final String cefMessage = inputStructuredRecord.get(pluginConfig.getVesMessageFieldName());
+
+ // Get TCA Policy
+ final TCAPolicy tcaPolicy = CDAPPluginUtils.readValue(pluginConfig.getPolicyJson(), TCAPolicy.class);
+
+ // create initial processor context
+ final TCACEFProcessorContext initialProcessorContext =
+ new TCACEFProcessorContext(cefMessage, tcaPolicy);
+
+ final TCACEFJsonProcessor jsonProcessor = new TCACEFJsonProcessor();
+ final TCACEFProcessorContext jsonProcessorContext =
+ jsonProcessor.processMessage(initialProcessorContext);
+
+ if (jsonProcessorContext.getCEFEventListener() != null) {
+
+ LOG.debug("Json to CEF parsing successful. Parsed object {}",
+ jsonProcessorContext.getCEFEventListener());
+
+ // compute violations
+ final TCACEFProcessorContext processorContextWithViolations =
+ TCAUtils.computeThresholdViolations(jsonProcessorContext);
+
+                    // if violations are found then create alert message
+ if (processorContextWithViolations.canProcessingContinue()) {
+
+ alertMessage = TCAUtils.createTCAAlertString(processorContextWithViolations,
+ pluginConfig.getReferenceName(), pluginConfig.getEnableAlertCEFFormat());
+ calculatorMessageType = TCACalculatorMessageType.NON_COMPLIANT;
+
+                        LOG.debug("VES Threshold Violation Detected. An alert message will be generated: {}",
+ alertMessage);
+
+ final MetricsPerEventName metricsPerEventName =
+ processorContextWithViolations.getMetricsPerEventName();
+ if (metricsPerEventName != null
+ && metricsPerEventName.getThresholds() != null
+ && metricsPerEventName.getThresholds().get(0) != null) {
+ final Threshold violatedThreshold = metricsPerEventName.getThresholds().get(0);
+ LOG.debug("CEF Message: {}, Violated Threshold: {}", cefMessage, violatedThreshold);
+ }
+
+ metrics.count(CDAPMetricsConstants.TCA_VES_NON_COMPLIANT_MESSAGES_METRIC, 1);
+
+ } else {
+ LOG.debug("No Threshold Violation Detected. No alert will be generated.");
+ calculatorMessageType = TCACalculatorMessageType.COMPLIANT;
+ metrics.count(CDAPMetricsConstants.TCA_VES_COMPLIANT_MESSAGES_METRIC, 1);
+ }
+
+ } else {
+ LOG.info("Unable to parse provided json message to CEF format. Invalid message: {}", cefMessage);
+ calculatorMessageType = TCACalculatorMessageType.INAPPLICABLE;
+ }
+
+ LOG.debug("Calculator message type: {} for message: {}", calculatorMessageType, cefMessage);
+
+ final Schema outputSchema = Schema.parseJson(pluginConfig.getSchema());
+
+ // create new output record builder and copy any input record values to output record builder
+ final Builder outputRecordBuilder =
+ CDAPPluginUtils.createOutputStructuredRecordBuilder(outputSchema, inputStructuredRecord);
+
+ // add alert field
+ final Builder outputRecordBuilderWithAlertField =
+ CDAPPluginUtils.addFieldValueToStructuredRecordBuilder(outputRecordBuilder,
+ outputSchema, pluginConfig.getAlertFieldName(), alertMessage);
+
+ // add message field type
+ final Builder outRecordBuilderWithMessageTypeField =
+ CDAPPluginUtils.addFieldValueToStructuredRecordBuilder(outputRecordBuilderWithAlertField,
+ outputSchema, pluginConfig.getMessageTypeFieldName(), calculatorMessageType.toString());
+
+ return outRecordBuilderWithMessageTypeField.build();
+ }
+ });
+ }
+}