TCA: Replace any openecomp reference by onap
[dcaegen2/analytics/tca.git] / dcae-analytics-cdap-plugins / src / test / java / org / onap / dcae / apod / analytics / cdap / plugins / sparkcompute / tca / SimpleTCAPluginTest.java
-/*\r
- * ===============================LICENSE_START======================================\r
- *  dcae-analytics\r
- * ================================================================================\r
- *    Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- *  Licensed under the Apache License, Version 2.0 (the "License");\r
- *  you may not use this file except in compliance with the License.\r
- *   You may obtain a copy of the License at\r
- *\r
- *          http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- *  Unless required by applicable law or agreed to in writing, software\r
- *  distributed under the License is distributed on an "AS IS" BASIS,\r
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- *  See the License for the specific language governing permissions and\r
- *  limitations under the License.\r
- *  ============================LICENSE_END===========================================\r
- */\r
-\r
-package org.openecomp.dcae.apod.analytics.cdap.plugins.sparkcompute.tca;\r
-\r
-import co.cask.cdap.api.data.format.StructuredRecord;\r
-import co.cask.cdap.api.data.schema.Schema;\r
-import co.cask.cdap.etl.api.PipelineConfigurer;\r
-import co.cask.cdap.etl.api.StageConfigurer;\r
-import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;\r
-import org.apache.spark.api.java.JavaRDD;\r
-import org.apache.spark.api.java.JavaSparkContext;\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.mockito.Mockito;\r
-import org.openecomp.dcae.apod.analytics.cdap.common.persistance.tca.TCACalculatorMessageType;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.BaseAnalyticsCDAPPluginsUnitTest;\r
-import org.openecomp.dcae.apod.analytics.cdap.plugins.domain.config.tca.TestSimpleTCAPluginConfig;\r
-\r
-import java.util.LinkedList;\r
-import java.util.List;\r
-\r
-import static org.hamcrest.CoreMatchers.is;\r
-import static org.junit.Assert.assertNotNull;\r
-import static org.junit.Assert.assertThat;\r
-import static org.junit.Assert.assertTrue;\r
-import static org.mockito.Mockito.mock;\r
-import static org.mockito.Mockito.times;\r
-import static org.mockito.Mockito.verify;\r
-import static org.mockito.Mockito.when;\r
-\r
-/**\r
- * @author Rajiv Singla . Creation Date: 2/17/2017.\r
- */\r
-public class SimpleTCAPluginTest extends BaseAnalyticsCDAPPluginsUnitTest {\r
-\r
-    private SimpleTCAPlugin simpleTCAPlugin;\r
-\r
-    @Before\r
-    public void before() {\r
-        final TestSimpleTCAPluginConfig testSimpleTCAPluginConfig = getTestSimpleTCAPluginConfig();\r
-        Schema outputSchema = Schema.recordOf(\r
-                "TestSimpleTCAPluginInputSchema",\r
-                Schema.Field.of("message", Schema.of(Schema.Type.STRING)),\r
-                Schema.Field.of("alert", Schema.nullableOf(Schema.of(Schema.Type.STRING))),\r
-                Schema.Field.of("tcaMessageType", Schema.of(Schema.Type.STRING))\r
-        );\r
-        testSimpleTCAPluginConfig.setSchema(outputSchema.toString());\r
-        simpleTCAPlugin = new SimpleTCAPlugin(testSimpleTCAPluginConfig);\r
-    }\r
-\r
-    @Test\r
-    public void testConfigurePipeline() throws Exception {\r
-        final PipelineConfigurer pipelineConfigurer = mock(PipelineConfigurer.class);\r
-        final StageConfigurer stageConfigurer = mock(StageConfigurer.class);\r
-        when(pipelineConfigurer.getStageConfigurer()).thenReturn(stageConfigurer);\r
-        when(stageConfigurer.getInputSchema()).thenReturn(getSimpleTCAPluginInputSchema());\r
-        simpleTCAPlugin.configurePipeline(pipelineConfigurer);\r
-        verify(stageConfigurer, times(1)).getInputSchema();\r
-    }\r
-\r
-    @Test\r
-    public void testTransform() throws Exception {\r
-\r
-        JavaSparkContext javaSparkContext = new JavaSparkContext("local", "test");\r
-\r
-        Schema sourceSchema = Schema.recordOf("CEFMessageSourceSchema",\r
-                Schema.Field.of("message", Schema.of(Schema.Type.STRING))\r
-        );\r
-\r
-        // Inapplicable Message Structured Record\r
-        final StructuredRecord inapplicableSR =\r
-                StructuredRecord.builder(sourceSchema).set("message", "test").build();\r
-        // compliant\r
-        final StructuredRecord compliantSR =\r
-                StructuredRecord.builder(sourceSchema).set("message",\r
-                        fromStream(CEF_MESSAGE_JSON_FILE_LOCATION)).build();\r
-        // non compliant\r
-        final String nonCompliantCEF = fromStream(CEF_NON_COMPLIANT_MESSAGE_JSON_FILE_LOCATION);\r
-        final StructuredRecord nonCompliantSR =\r
-                StructuredRecord.builder(sourceSchema).set("message", nonCompliantCEF).build();\r
-\r
-        final List<StructuredRecord> records = new LinkedList<>();\r
-        records.add(inapplicableSR);\r
-        records.add(compliantSR);\r
-        records.add(nonCompliantSR);\r
-\r
-        final JavaRDD<StructuredRecord> input =\r
-                javaSparkContext.parallelize(records);\r
-        final SparkExecutionPluginContext context = Mockito.mock(SparkExecutionPluginContext.class);\r
-        final MockStageMetrics stageMetrics = Mockito.mock(MockStageMetrics.class);\r
-        when(context.getMetrics()).thenReturn(stageMetrics);\r
-        final List<StructuredRecord> outputRecord = simpleTCAPlugin.transform(context, input).collect();\r
-        assertNotNull(outputRecord);\r
-        assertThat(outputRecord.size(), is(3));\r
-\r
-        assertTrue(outputRecord.get(0).get("tcaMessageType").equals(TCACalculatorMessageType.INAPPLICABLE.toString()));\r
-        assertTrue(outputRecord.get(1).get("tcaMessageType").equals(TCACalculatorMessageType.COMPLIANT.toString()));\r
-        assertTrue(outputRecord.get(2).get("tcaMessageType").equals(TCACalculatorMessageType.NON_COMPLIANT.toString()));\r
-    }\r
-\r
-}\r
+/*
+ * ===============================LICENSE_START======================================
+ *  dcae-analytics
+ * ================================================================================
+ *    Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *   You may obtain a copy of the License at
+ *
+ *          http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============================LICENSE_END===========================================
+ */
+
+package org.onap.dcae.apod.analytics.cdap.plugins.sparkcompute.tca;
+
+import co.cask.cdap.api.data.format.StructuredRecord;
+import co.cask.cdap.api.data.schema.Schema;
+import co.cask.cdap.etl.api.PipelineConfigurer;
+import co.cask.cdap.etl.api.StageConfigurer;
+import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.onap.dcae.apod.analytics.cdap.common.persistance.tca.TCACalculatorMessageType;
+import org.onap.dcae.apod.analytics.cdap.plugins.BaseAnalyticsCDAPPluginsUnitTest;
+import org.onap.dcae.apod.analytics.cdap.plugins.domain.config.tca.TestSimpleTCAPluginConfig;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * @author Rajiv Singla . Creation Date: 2/17/2017.
+ */
+public class SimpleTCAPluginTest extends BaseAnalyticsCDAPPluginsUnitTest {
+
+    // Plugin under test; rebuilt before every test method by before().
+    private SimpleTCAPlugin simpleTCAPlugin;
+
+    /**
+     * Creates the plugin under test with an output schema containing a required
+     * "message" field, a nullable "alert" field and a required "tcaMessageType"
+     * field (the schema is passed to the plugin config as its JSON string form).
+     */
+    @Before
+    public void before() {
+        final TestSimpleTCAPluginConfig testSimpleTCAPluginConfig = getTestSimpleTCAPluginConfig();
+        Schema outputSchema = Schema.recordOf(
+                "TestSimpleTCAPluginInputSchema",
+                Schema.Field.of("message", Schema.of(Schema.Type.STRING)),
+                Schema.Field.of("alert", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
+                Schema.Field.of("tcaMessageType", Schema.of(Schema.Type.STRING))
+        );
+        testSimpleTCAPluginConfig.setSchema(outputSchema.toString());
+        simpleTCAPlugin = new SimpleTCAPlugin(testSimpleTCAPluginConfig);
+    }
+
+    /**
+     * Verifies that configurePipeline consults the stage's input schema exactly
+     * once when wiring the plugin into a pipeline (mocked configurer chain).
+     */
+    @Test
+    public void testConfigurePipeline() throws Exception {
+        final PipelineConfigurer pipelineConfigurer = mock(PipelineConfigurer.class);
+        final StageConfigurer stageConfigurer = mock(StageConfigurer.class);
+        when(pipelineConfigurer.getStageConfigurer()).thenReturn(stageConfigurer);
+        when(stageConfigurer.getInputSchema()).thenReturn(getSimpleTCAPluginInputSchema());
+        simpleTCAPlugin.configurePipeline(pipelineConfigurer);
+        verify(stageConfigurer, times(1)).getInputSchema();
+    }
+
+    /**
+     * Runs the plugin's transform over a local Spark context with three CEF
+     * messages — one inapplicable, one compliant, one non-compliant — and checks
+     * that each output record is tagged with the matching TCACalculatorMessageType.
+     */
+    @Test
+    public void testTransform() throws Exception {
+
+        // NOTE(review): this local context is never stopped — consider calling
+        // javaSparkContext.stop() in a finally block or @After method so repeated
+        // runs don't leak Spark contexts; verify against the test runner setup.
+        JavaSparkContext javaSparkContext = new JavaSparkContext("local", "test");
+
+        // Source schema: a single required string field holding the raw CEF payload.
+        Schema sourceSchema = Schema.recordOf("CEFMessageSourceSchema",
+                Schema.Field.of("message", Schema.of(Schema.Type.STRING))
+        );
+
+        // Inapplicable Message Structured Record
+        final StructuredRecord inapplicableSR =
+                StructuredRecord.builder(sourceSchema).set("message", "test").build();
+        // compliant
+        final StructuredRecord compliantSR =
+                StructuredRecord.builder(sourceSchema).set("message",
+                        fromStream(CEF_MESSAGE_JSON_FILE_LOCATION)).build();
+        // non compliant
+        final String nonCompliantCEF = fromStream(CEF_NON_COMPLIANT_MESSAGE_JSON_FILE_LOCATION);
+        final StructuredRecord nonCompliantSR =
+                StructuredRecord.builder(sourceSchema).set("message", nonCompliantCEF).build();
+
+        // Order matters: output assertions below index records by position 0/1/2.
+        final List<StructuredRecord> records = new LinkedList<>();
+        records.add(inapplicableSR);
+        records.add(compliantSR);
+        records.add(nonCompliantSR);
+
+        final JavaRDD<StructuredRecord> input =
+                javaSparkContext.parallelize(records);
+        // Mocked execution context: only getMetrics() is stubbed, which is the
+        // sole context interaction the transform exercises here.
+        final SparkExecutionPluginContext context = Mockito.mock(SparkExecutionPluginContext.class);
+        final MockStageMetrics stageMetrics = Mockito.mock(MockStageMetrics.class);
+        when(context.getMetrics()).thenReturn(stageMetrics);
+        final List<StructuredRecord> outputRecord = simpleTCAPlugin.transform(context, input).collect();
+        assertNotNull(outputRecord);
+        assertThat(outputRecord.size(), is(3));
+
+        // NOTE(review): assertTrue(a.equals(b)) reports only "expected true" on
+        // failure — assertEquals or assertThat(..., is(...)) would show the
+        // actual tcaMessageType value and ease debugging.
+        assertTrue(outputRecord.get(0).get("tcaMessageType").equals(TCACalculatorMessageType.INAPPLICABLE.toString()));
+        assertTrue(outputRecord.get(1).get("tcaMessageType").equals(TCACalculatorMessageType.COMPLIANT.toString()));
+        assertTrue(outputRecord.get(2).get("tcaMessageType").equals(TCACalculatorMessageType.NON_COMPLIANT.toString()));
+    }
+
+}