seed code of des (data extraction service) 76/110676/8
author: Kai <lukai@chinamobile.com>
Wed, 29 Jul 2020 03:18:59 +0000 (11:18 +0800)
committer: Kai <lukai@chinamobile.com>
Tue, 4 Aug 2020 04:10:04 +0000 (12:10 +0800)
Issue-ID: DCAEGEN2-2258
Signed-off-by: Kai Lu <lukai@chinamobile.com>
Change-Id: I576069a0016b2ce690274734d1c4d1c70deb0b05

16 files changed:
components/datalake-handler/des/Dockerfile [new file with mode: 0644]
components/datalake-handler/des/README.md [new file with mode: 0644]
components/datalake-handler/des/pom.xml [new file with mode: 0644]
components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql [new file with mode: 0644]
components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql [new file with mode: 0644]
components/datalake-handler/des/src/assembly/run.sh [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/DesApplication.java [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/SwaggerConfig.java [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/controller/DataExposureController.java [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/domain/DataExposure.java [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/dto/DataExposureConfig.java [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/repository/DataExposureRepository.java [new file with mode: 0644]
components/datalake-handler/des/src/main/java/org/onap/datalake/des/service/DataExposureService.java [new file with mode: 0644]
components/datalake-handler/des/src/main/resources/application.properties [new file with mode: 0644]
components/datalake-handler/des/src/main/resources/logback.xml [new file with mode: 0644]
components/datalake-handler/des/src/main/resources/swagger.json [new file with mode: 0644]

diff --git a/components/datalake-handler/des/Dockerfile b/components/datalake-handler/des/Dockerfile
new file mode 100644 (file)
index 0000000..a5d590a
--- /dev/null
@@ -0,0 +1,50 @@
+# ============LICENSE_START===================================================
+#  Copyright (C) 2020 China Mobile.
+# ============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=====================================================
+FROM openjdk:11-jre-slim
+
+MAINTAINER Guobiao Mo <guobiaomo@chinamobile.com>
+
+EXPOSE 1681
+
+RUN groupadd -r datalake && useradd -r -g datalake datalake
+RUN mkdir /home/datalake/
+
+USER datalake
+WORKDIR /home/datalake
+# Declare the build arg passed by dockerfile-maven-plugin (pom <buildArgs>);
+# without ARG, ${JAR_FILE} expands empty and COPY would grab all of target/.
+ARG JAR_FILE
+COPY target/${JAR_FILE} /home/datalake/
+COPY src/assembly/run.sh /home/datalake/
+
+WORKDIR /home/datalake/db_init
+ADD src/assembly/init_db/scripts/db_init .
+USER root
+RUN chmod 0755 ./*
+WORKDIR /home/datalake
+COPY src/assembly/init_db/scripts/init_db.sql .
+COPY src/assembly/init_db/scripts/init_db_data.sql .
+
+RUN apt update && \
+    apt install -y mariadb-client && \
+    apt install -y curl
+
+USER datalake
+
+CMD ["sh", "run.sh"]
+
diff --git a/components/datalake-handler/des/README.md b/components/datalake-handler/des/README.md
new file mode 100644 (file)
index 0000000..9eb7093
--- /dev/null
@@ -0,0 +1 @@
+DataLake Data Exposure Service provides a framework to expose data in BigData databases via REST API with just configurations.
diff --git a/components/datalake-handler/des/pom.xml b/components/datalake-handler/des/pom.xml
new file mode 100644 (file)
index 0000000..c0346ce
--- /dev/null
@@ -0,0 +1,284 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ============LICENSE_START=======================================================
+   Copyright (C) 2020 China Mobile.
+  ================================================================================
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+
+  SPDX-License-Identifier: Apache-2.0
+  ============LICENSE_END=========================================================
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+       xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+       <modelVersion>4.0.0</modelVersion>
+
+       <parent>
+               <groupId>org.onap.dcaegen2.services.components</groupId>
+               <artifactId>datalake-handler</artifactId>
+               <version>1.2.0-SNAPSHOT</version>
+       </parent>
+
+       <groupId>org.onap.dcaegen2.services.components.datalake-handler</groupId>
+       <artifactId>des</artifactId>
+       <packaging>jar</packaging>
+       <name>DataLake Exposure Service</name>
+
+       <properties>
+               <swagger.version>2.9.2</swagger.version>
+               <dockerfile-maven.version>1.4.5</dockerfile-maven.version>
+               <docker.image.path>onap/org.onap.dcaegen2.services.datalake.exposure.service</docker.image.path>
+               <maven.build.timestamp.format>yyyyMMdd'T'HHmmss</maven.build.timestamp.format>
+       </properties>
+
+       <dependencies>
+
+               <dependency>
+                       <groupId>org.jdom</groupId>
+                       <artifactId>jdom2</artifactId>
+                       <version>2.0.6</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>com.facebook.presto</groupId>
+                       <artifactId>presto-jdbc</artifactId>
+                       <version>0.229</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.apache.hadoop</groupId>
+                       <artifactId>hadoop-client</artifactId>
+                       <version>${hadoop.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.mariadb.jdbc</groupId>
+                       <artifactId>mariadb-java-client</artifactId>
+                       <version>2.4.1</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.json</groupId>
+                       <artifactId>json</artifactId>
+                       <version>20190722</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.apache.httpcomponents</groupId>
+                       <artifactId>httpclient</artifactId>
+                       <version>4.5.10</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.apache.kafka</groupId>
+                       <artifactId>kafka-clients</artifactId>
+                       <version>2.3.1</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-starter-web</artifactId>
+                       <version>${springboot.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-starter-actuator</artifactId>
+                       <version>${springboot.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-starter-data-jpa</artifactId>
+                       <version>${springboot.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-starter-data-couchbase</artifactId>
+                       <version>${springboot.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-starter-test</artifactId>
+                       <version>${springboot.version}</version>
+                       <scope>test</scope>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.springframework.boot</groupId>
+                       <artifactId>spring-boot-configuration-processor</artifactId>
+                       <version>${springboot.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.elasticsearch.client</groupId>
+                       <artifactId>elasticsearch-rest-high-level-client</artifactId>
+                       <version>${elasticsearchjava.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>commons-io</groupId>
+                       <artifactId>commons-io</artifactId>
+                       <version>2.6</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>com.fasterxml.jackson.dataformat</groupId>
+                       <artifactId>jackson-dataformat-yaml</artifactId>
+                       <version>${jackson.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>com.fasterxml.jackson.dataformat</groupId>
+                       <artifactId>jackson-dataformat-xml</artifactId>
+                       <version>${jackson.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>com.fasterxml.jackson.core</groupId>
+                       <artifactId>jackson-databind</artifactId>
+                       <version>${jackson.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>com.google.code.gson</groupId>
+                       <artifactId>gson</artifactId>
+                       <version>2.8.2</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.projectlombok</groupId>
+                       <artifactId>lombok</artifactId>
+                       <version>1.18.10</version>
+                       <scope>provided</scope>
+               </dependency>
+
+               <dependency>
+                       <groupId>io.druid</groupId>
+                       <artifactId>tranquility-core_2.11</artifactId>
+                       <version>0.8.3</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.apache.velocity</groupId>
+                       <artifactId>velocity-engine-core</artifactId>
+                       <version>2.1</version>
+               </dependency>
+
+
+               <dependency>
+                       <groupId>org.hibernate</groupId>
+                       <artifactId>hibernate-core</artifactId>
+                       <version>5.3.7.Final</version>
+               </dependency>
+
+               <!-- jsr303 validation -->
+               <dependency>
+                       <groupId>javax.validation</groupId>
+                       <artifactId>validation-api</artifactId>
+                       <version>2.0.1.Final</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.hibernate</groupId>
+                       <artifactId>hibernate-validator</artifactId>
+                       <version>6.1.0.Final</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>io.springfox</groupId>
+                       <artifactId>springfox-swagger2</artifactId>
+                       <version>${swagger.version}</version>
+                       <scope>compile</scope>
+               </dependency>
+
+               <dependency>
+                       <groupId>io.springfox</groupId>
+                       <artifactId>springfox-swagger-ui</artifactId>
+                       <version>${swagger.version}</version>
+                       <scope>compile</scope>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.mongodb</groupId>
+                       <artifactId>mongo-java-driver</artifactId>
+                       <version>${mongojava.version}</version>
+               </dependency>
+
+               <dependency>
+                       <groupId>com.couchbase.mock</groupId>
+                       <artifactId>CouchbaseMock</artifactId>
+                       <version>1.5.22</version>
+                       <scope>test</scope>
+               </dependency>
+
+               <dependency>
+                       <groupId>org.onap.dcaegen2.services.components.datalake-handler</groupId>
+                       <artifactId>feeder</artifactId>
+               </dependency>
+
+       </dependencies>
+
+
+       <build>
+               <plugins>
+                       <plugin>
+                               <groupId>org.springframework.boot</groupId>
+                               <artifactId>spring-boot-maven-plugin</artifactId>
+                       </plugin>
+                       <plugin>
+                               <groupId>com.spotify</groupId>
+                               <artifactId>dockerfile-maven-plugin</artifactId>
+                               <version>${dockerfile-maven.version}</version>
+                               <configuration>
+                                       <!-- <username>docker</username> <password>docker</password> -->
+                                       <!-- repository>repo.treescale.com/moguobiao/datalake-feeder-maven</repository -->
+                                       <!-- repository>moguobiao/datalake-feeder-maven-spotify</repository -->
+                                       <repository>${onap.nexus.dockerregistry.daily}/${docker.image.path}</repository>
+                                       <!-- <repository>mizunoami123/dl-feeder</repository> -->
+                                       <tag>${project.version}</tag>
+                                       <dockerfile>Dockerfile</dockerfile>
+                                       <!-- useMavenSettingsForAuth>true</useMavenSettingsForAuth -->
+                                       <buildArgs>
+                                               <JAR_FILE>${project.build.finalName}.jar</JAR_FILE>
+                                       </buildArgs>
+                               </configuration>
+                               <!-- <executions> <execution> <id>build-sl-des-image</id> <phase>package</phase> 
+                                       <goals> <goal>build</goal> </goals> </execution> <execution> <id>tag-and-push-image-latest</id> 
+                                       <phase>package</phase> <goals> <goal>tag</goal> <goal>push</goal> </goals> 
+                                       <configuration> <repository>${onap.nexus.dockerregistry.daily}/${docker.image.path}</repository> 
+                                       <tag>latest</tag> <useMavenSettingsForAuth>true</useMavenSettingsForAuth> 
+                                       </configuration> </execution> <execution> <id>tag-and-push-image-with-version</id> 
+                                       <phase>package</phase> <goals> <goal>tag</goal> <goal>push</goal> </goals> 
+                                       <configuration> <repository>${onap.nexus.dockerregistry.daily}/${docker.image.path}</repository> 
+                                       <tag>${project.version}</tag> <useMavenSettingsForAuth>true</useMavenSettingsForAuth> 
+                                       </configuration> </execution> <execution> <id>tag-and-push-image-with-version-and-date</id> 
+                                       <phase>package</phase> <goals> <goal>tag</goal> <goal>push</goal> </goals> 
+                                       <configuration> <repository>${onap.nexus.dockerregistry.daily}/${docker.image.path}</repository> 
+                                       <tag>${project.version}-${maven.build.timestamp}Z</tag> <useMavenSettingsForAuth>true</useMavenSettingsForAuth> 
+                                       </configuration> </execution> </executions> -->
+                               <dependencies>
+                                       <!-- To make this work on JDK 9+ -->
+                                       <dependency>
+                                               <groupId>javax.activation</groupId>
+                                               <artifactId>javax.activation-api</artifactId>
+                                               <version>1.2.0</version>
+                                       </dependency>
+                               </dependencies>
+                       </plugin>
+               </plugins>
+       </build>
+</project>
diff --git a/components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql b/components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql
new file mode 100644 (file)
index 0000000..e71093a
--- /dev/null
@@ -0,0 +1,143 @@
+
+/*
+* ============LICENSE_START=======================================================
+* ONAP : DATALAKE
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+-- IF EXISTS: a plain DROP fails (and aborts this init script) on a fresh
+-- install where the datalake database does not exist yet.
+DROP DATABASE IF EXISTS datalake;
+create database datalake;
+use datalake;
+CREATE TABLE `topic_name` (
+  `id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `db_type` (
+  `id` varchar(255) NOT NULL,
+  `default_port` int(11) DEFAULT NULL,
+  `name` varchar(255) NOT NULL,
+  `tool` bit(1) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `db` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `database_name` varchar(255) DEFAULT NULL,
+  `enabled` bit(1) NOT NULL,
+  `encrypt` bit(1) DEFAULT NULL,
+  `host` varchar(255) DEFAULT NULL,
+  `login` varchar(255) DEFAULT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `pass` varchar(255) DEFAULT NULL,
+  `port` int(11) DEFAULT NULL,
+  `property1` varchar(255) DEFAULT NULL,
+  `property2` varchar(255) DEFAULT NULL,
+  `property3` varchar(255) DEFAULT NULL,
+  `db_type_id` varchar(255) NOT NULL,
+  `presto_catalog` varchar(255) DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FK3njadtw43ieph7ftt4kxdhcko` (`db_type_id`),
+  CONSTRAINT `FK3njadtw43ieph7ftt4kxdhcko` FOREIGN KEY (`db_type_id`) REFERENCES `db_type` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `design_type` (
+  `id` varchar(255) NOT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `note` varchar(255) DEFAULT NULL,
+  `db_type_id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKm8rkv2qkq01gsmeq1c3y4w02x` (`db_type_id`),
+  CONSTRAINT `FKm8rkv2qkq01gsmeq1c3y4w02x` FOREIGN KEY (`db_type_id`) REFERENCES `db_type` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `design` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `body` text DEFAULT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `note` varchar(255) DEFAULT NULL,
+  `submitted` bit(1) DEFAULT NULL,
+  `design_type_id` varchar(255) NOT NULL,
+  `topic_name_id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKo43yi6aputq6kwqqu8eqbspm5` (`design_type_id`),
+  KEY `FKabb8e74230glxpaiai4aqsr34` (`topic_name_id`),
+  CONSTRAINT `FKabb8e74230glxpaiai4aqsr34` FOREIGN KEY (`topic_name_id`) REFERENCES `topic_name` (`id`),
+  CONSTRAINT `FKo43yi6aputq6kwqqu8eqbspm5` FOREIGN KEY (`design_type_id`) REFERENCES `design_type` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `kafka` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `broker_list` varchar(255) NOT NULL,
+  `consumer_count` int(11) DEFAULT 3,
+  `enabled` bit(1) NOT NULL,
+  `excluded_topic` varchar(1023) DEFAULT '__consumer_offsets,__transaction_state',
+  `group` varchar(255) DEFAULT 'datalake',
+  `included_topic` varchar(255) DEFAULT NULL,
+  `login` varchar(255) DEFAULT NULL,
+  `name` varchar(255) NOT NULL,
+  `pass` varchar(255) DEFAULT NULL,
+  `secure` bit(1) DEFAULT b'0',
+  `security_protocol` varchar(255) DEFAULT NULL,
+  `timeout_sec` int(11) DEFAULT 10,
+  `zk` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `topic` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `aggregate_array_path` varchar(255) DEFAULT NULL,
+  `correlate_cleared_message` bit(1) NOT NULL DEFAULT b'0',
+  `data_format` varchar(255) DEFAULT NULL,
+  `enabled` bit(1) NOT NULL,
+  `flatten_array_path` varchar(255) DEFAULT NULL,
+  `login` varchar(255) DEFAULT NULL,
+  `message_id_path` varchar(255) DEFAULT NULL,
+  `pass` varchar(255) DEFAULT NULL,
+  `save_raw` bit(1) NOT NULL DEFAULT b'0',
+  `ttl_day` int(11) DEFAULT NULL,
+  `topic_name_id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKj3pldlfaokdhqjfva8n3pkjca` (`topic_name_id`),
+  CONSTRAINT `FKj3pldlfaokdhqjfva8n3pkjca` FOREIGN KEY (`topic_name_id`) REFERENCES `topic_name` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `map_db_design` (
+  `design_id` int(11) NOT NULL,
+  `db_id` int(11) NOT NULL,
+  PRIMARY KEY (`design_id`,`db_id`),
+  KEY `FKhpn49r94k05mancjtn301m2p0` (`db_id`),
+  CONSTRAINT `FKfli240v96cfjbnmjqc0fvvd57` FOREIGN KEY (`design_id`) REFERENCES `design` (`id`),
+  CONSTRAINT `FKhpn49r94k05mancjtn301m2p0` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `map_db_topic` (
+  `topic_id` int(11) NOT NULL,
+  `db_id` int(11) NOT NULL,
+  PRIMARY KEY (`db_id`,`topic_id`),
+  KEY `FKq1jon185jnrr7dv1dd8214uw0` (`topic_id`),
+  CONSTRAINT `FKirro29ojp7jmtqx9m1qxwixcc` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`),
+  CONSTRAINT `FKq1jon185jnrr7dv1dd8214uw0` FOREIGN KEY (`topic_id`) REFERENCES `topic` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `map_kafka_topic` (
+  `kafka_id` int(11) NOT NULL,
+  `topic_id` int(11) NOT NULL,
+  PRIMARY KEY (`topic_id`,`kafka_id`),
+  KEY `FKtdrme4h7rxfh04u2i2wqu23g5` (`kafka_id`),
+  CONSTRAINT `FK5q7jdxy54au5rcrhwa4a5igqi` FOREIGN KEY (`topic_id`) REFERENCES `topic` (`id`),
+  CONSTRAINT `FKtdrme4h7rxfh04u2i2wqu23g5` FOREIGN KEY (`kafka_id`) REFERENCES `kafka` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `data_exposure` (
+  `id` varchar(255) NOT NULL,
+  `note` varchar(255) DEFAULT NULL,
+  `sql_template` varchar(10000) NOT NULL,
+  `db_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKf5ps4jxauwawk4ac86t5t6xev` (`db_id`),
+  CONSTRAINT `FKf5ps4jxauwawk4ac86t5t6xev` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
diff --git a/components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql b/components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql
new file mode 100644 (file)
index 0000000..234351f
--- /dev/null
@@ -0,0 +1,95 @@
+
+/*
+* ============LICENSE_START=======================================================
+* ONAP : DATALAKE
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+INSERT INTO datalake.kafka(
+   id
+  ,name
+  ,consumer_count
+  ,enabled
+  ,`group`
+  ,broker_list
+  ,included_topic
+  ,login
+  ,pass
+  ,secure
+  ,security_protocol
+  ,timeout_sec
+  ,zk
+) VALUES (
+  1
+  ,'main Kafka cluster' -- name - IN varchar(255)
+  ,3   -- consumer_count - IN int(11)
+  ,1   -- enabled - IN bit(1)
+  ,'dlgroup'  -- group - IN varchar(255)
+  ,'message-router-kafka:9092'  -- host_port - IN varchar(255)
+  ,''  -- included_topic - IN varchar(255)
+  ,'admin'  -- login - IN varchar(255)
+  ,'admin-secret'  -- pass - IN varchar(255)
+  ,0   -- secure - IN bit(1)
+  ,'SASL_PLAINTEXT'  -- security_protocol - IN varchar(255)
+  ,10   -- timeout_sec - IN int(11)
+  ,'message-router-zookeeper:2181'  -- zk - IN varchar(255)
+);
+insert into db_type (`id`, `name`, tool) values ('CB', 'Couchbase', false);
+insert into db_type (`id`, `name`, tool) values ('ES', 'Elasticsearch', false);
+insert into db_type (`id`, `name`, tool,`default_port`) values ('MONGO', 'MongoDB', false, 27017);
+insert into db_type (`id`, `name`, tool) values ('DRUID', 'Druid', false);
+insert into db_type (`id`, `name`, tool) values ('HDFS', 'HDFS', false);
+insert into db_type (`id`, `name`, tool) values ('KIBANA', 'Kibana', true);
+insert into db_type (`id`, `name`, tool) values ('SUPERSET', 'Apache Superset', true);
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`,`login`,`pass`,`database_name`) values (1, 'CB', true, true, 'Couchbase 1','dl-couchbase','dl','dl1234','datalake');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (2, 'ES', true, true, 'Elasticsearch','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`,`port`,`database_name`,`presto_catalog`) values (3, 'MONGO', true, true, 'MongoDB 1','dl-mongodb',27017,'datalake','mongodb');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (4, 'DRUID', true, true, 'Druid','dl-druid');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`,`login`) values (5, 'HDFS', true, true, 'Hadoop Cluster','dl-hdfs','dl');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (6, 'KIBANA', true, false, 'Kibana demo','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (7, 'SUPERSET', true, false, 'Superset demo','dl-druid');
+insert into topic_name (id) values ('_DL_DEFAULT_');
+insert into topic_name (id) values ('unauthenticated.SEC_FAULT_OUTPUT');
+insert into topic_name (id) values ('unauthenticated.VES_MEASUREMENT_OUTPUT');
+insert into topic_name (id) values ('EPC');
+insert into topic_name (id) values ('HW');
+-- in production, default enabled should be off
+insert into `topic`(id, `topic_name_id`,`enabled`,`save_raw`,`ttl_day`,`data_format`) values (1, '_DL_DEFAULT_',1,0,3650,'JSON');
+insert into `topic`(id, `topic_name_id`,correlate_cleared_message,`enabled`, message_id_path,`data_format`) 
+values (2, 'unauthenticated.SEC_FAULT_OUTPUT',1,1,'/event/commonEventHeader/eventName,/event/commonEventHeader/reportingEntityName,/event/faultFields/specificProblem,/event/commonEventHeader/eventId','JSON');
+insert into `topic`(id, `topic_name_id`,`enabled`, aggregate_array_path,flatten_array_path,`data_format`) 
+values (3, 'unauthenticated.VES_MEASUREMENT_OUTPUT',1,
+'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
+'/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface',
+'JSON');
+insert into `topic`(id, `topic_name_id`,`enabled`,  flatten_array_path,`data_format`) 
+values (4, 'EPC',1, '/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface', 'JSON');
+insert into `topic`(id, `topic_name_id`,`enabled`, aggregate_array_path,`data_format`) 
+values (5, 'HW',1,
+'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
+'JSON'); 
+insert into `map_db_topic`(`db_id`,`topic_id`) select db.id, topic.id from db_type, db, topic where db.db_type_id=db_type.id and db_type.tool=0;
+insert into `map_kafka_topic`(`kafka_id`,`topic_id`) select kafka.id, topic.id from kafka, topic;
+insert into design_type (id, `name`, `db_type_id`) values ('KIBANA_DB', 'Kibana Dashboard', 'KIBANA');
+insert into design_type (id, `name`, `db_type_id`) values ('KIBANA_SEARCH', 'Kibana Search', 'KIBANA');
+insert into design_type (id, `name`, `db_type_id`) values ('KIBANA_VISUAL', 'Kibana Visualization', 'KIBANA');
+insert into design_type (id, `name`, `db_type_id`) values ('ES_MAPPING', 'Elasticsearch Field Mapping Template', 'ES');
+insert into design_type (id, `name`, `db_type_id`) values ('DRUID_KAFKA_SPEC', 'Druid Kafka Indexing Service Supervisor Spec', 'DRUID');
+insert into design (id, `name`,topic_name_id, `submitted`,`body`, design_type_id) values (1, 'Kibana Dashboard on EPC test1', 'EPC',  0, 'body here', 'KIBANA_DB');
+insert into map_db_design (`design_id`,`db_id` ) values (1, 6);
+insert into `data_exposure`(`id`,`note`,`sql_template`,`db_id`) values ('totalBandwidth','KPI bandwidth history','select  from_unixtime(commonEventHeader.lastEpochMicrosec/1000) as timeStamp, sum(measurementFields.additionalFields."UPF.N3IncPkt._Dnn"+measurementFields.additionalFields."UPF.N3OgPkt._Dnn") as bandwidth from upf where commonEventHeader.sourceId = ''${id}'' and ( from_unixtime(commonEventHeader.lastEpochMicrosec/1000) between  from_iso8601_timestamp( ''${timeStamp}'') - interval ''${hour}'' hour  and from_iso8601_timestamp( ''${timeStamp}'') ) group by  commonEventHeader.lastEpochMicrosec order by commonEventHeader.lastEpochMicrosec desc ',3);
+insert into `data_exposure`(`id`,`note`,`sql_template`,`db_id`) values ('totalTraffic','KPI sum over history','select commonEventHeader.sourceId as id,  sum(measurementFields.additionalFields."UPF.N3IncPkt._Dnn"+measurementFields.additionalFields."UPF.N3OgPkt._Dnn") as totalTraffic from upf where commonEventHeader.sourceId = ''${id}''  and  from_unixtime(commonEventHeader.lastEpochMicrosec/1000) <= from_iso8601_timestamp( ''${timeStamp}'') ',3);
+insert into `data_exposure`(`id`,`note`,`sql_template`,`db_id`) values ('userNumber','KPI',' select  from_unixtime(commonEventHeader.lastEpochMicrosec/1000) as timeStamp, sum(measurementFields.additionalFields."AMF.RegSub._NS")  as userNumber from amf where commonEventHeader.sourceId = ''${id}'' and ( from_unixtime(commonEventHeader.lastEpochMicrosec/1000) between  from_iso8601_timestamp( ''${timeStamp}'') - interval ''${hour}'' hour  and from_iso8601_timestamp( ''${timeStamp}'') ) group by  commonEventHeader.lastEpochMicrosec, commonEventHeader.sourceId  order by commonEventHeader.lastEpochMicrosec   desc ',3);
diff --git a/components/datalake-handler/des/src/assembly/run.sh b/components/datalake-handler/des/src/assembly/run.sh
new file mode 100644 (file)
index 0000000..363daf6
--- /dev/null
@@ -0,0 +1,35 @@
#!/bin/sh
# ============LICENSE_START===================================================
#  Copyright (C) 2020 China Mobile.
# ============================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=====================================================

# Container entry point: initialize the database, then launch the service jar.
# (The shebang must be the very first line of the file to take effect; it was
# previously placed after the license header.)

echo "start init db ..."

# run-parts executes every script in db_init in lexical order (schema, then data).
/bin/run-parts /home/datalake/db_init

echo "finish init db"

# Locate the application jar. The original pattern ended in [-SNAPSHOT]+, which
# find's regex engine treats as a CHARACTER CLASS (any run of the letters
# S,N,A,P,H,O,T or '-'), not the literal suffix; match the literal -SNAPSHOT.
# NOTE(review): the artifact is still named feeder-* although this is the DES
# module — confirm against this module's pom artifactId.
snapshot_jar=`find . -regex '\./feeder-[0-9]+\.[0-9]+\.[0-9]+-SNAPSHOT\.jar'`
release_jar=`find . -regex '\./feeder-[0-9]+\.[0-9]+\.[0-9]+\.jar'`

if [ -n "$snapshot_jar" ]; then
    # Quote the path so a jar name containing whitespace cannot word-split.
    java -jar "$snapshot_jar"
elif [ -n "$release_jar" ]; then
    java -jar "$release_jar"
else
    echo "no application jar found in `pwd`"
    # Keep the container alive so the failure can be inspected.
    sleep 10000
fi
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/DesApplication.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/DesApplication.java
new file mode 100644 (file)
index 0000000..afb0fef
--- /dev/null
@@ -0,0 +1,43 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+/**
+ * Entry point of the Data Extraction Service application
+ * 
+ * @author Kai Lu
+ *
+ */
+
+@SpringBootApplication
+@EnableSwagger2
+public class DesApplication {
+
+       public static void main(String[] args) {
+               SpringApplication.run(DesApplication.class, args);
+       }
+
+}
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/SwaggerConfig.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/SwaggerConfig.java
new file mode 100644 (file)
index 0000000..79022e5
--- /dev/null
@@ -0,0 +1,67 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import springfox.documentation.builders.ApiInfoBuilder;
+import springfox.documentation.builders.PathSelectors;
+import springfox.documentation.builders.RequestHandlerSelectors;
+import springfox.documentation.service.ApiInfo;
+import springfox.documentation.spi.DocumentationType;
+import springfox.documentation.spring.web.plugins.Docket;
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+/**
+ * For Swagger integration
+ * 
+ * @author Kai Lu
+ *
+ */
+
+@Configuration
+@EnableSwagger2
+public class SwaggerConfig {
+
+    /**
+     * produceApi.
+     *
+     * @return Docket Docket
+     *
+     */
+       @Bean
+       public Docket produceApi() {
+               return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo()).select()
+                               .apis(RequestHandlerSelectors.basePackage("org.onap.datalake.des")).paths(PathSelectors.any()).build();
+       }
+
+    /**
+     * Api description.
+     *
+     * @return ApiInfo api Info
+     *
+     */
+       private ApiInfo apiInfo() {
+               return new ApiInfoBuilder().title("DataLake Rest APIs")
+                               .description("This page lists all the rest apis for DataLake.").version("1.0.0-SNAPSHOT").build();
+       }
+}
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/controller/DataExposureController.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/controller/DataExposureController.java
new file mode 100644 (file)
index 0000000..e71ba6b
--- /dev/null
@@ -0,0 +1,280 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des.controller;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.text.StringSubstitutor;
+import org.onap.datalake.des.domain.DataExposure;
+import org.onap.datalake.des.dto.DataExposureConfig;
+import org.onap.datalake.des.repository.DataExposureRepository;
+import org.onap.datalake.des.service.DataExposureService;
+import org.onap.datalake.feeder.controller.domain.PostReturnBody;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.MediaType;
+import org.springframework.validation.BindingResult;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+import org.springframework.web.bind.annotation.RestController;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+
+/**
+ * Data Exposure WS.
+ *
+ * @author Kai Lu
+ *
+ */
+@RestController
+@RequestMapping(value = "/exposure", produces = { MediaType.APPLICATION_JSON_VALUE })
+@Api(value = "/exposure", consumes = "application/json", produces = "application/json")
+public class DataExposureController {
+
+       private final Logger log = LoggerFactory.getLogger(this.getClass());
+       @Autowired
+       private DataExposureService dataExposureService;
+       @Autowired
+       private DataExposureRepository dataExposureRepository;
+
+    /**
+     * serve API.
+     *
+     * @param serviceId serviceId
+     * @param requestMap requestMap
+     * @param bindingResult bindingResult
+     * @param response response
+        * @return message that application is started
+        * @throws IOException
+        * @throws SQLException
+     *
+     */
+       @PostMapping("/{serviceId}")
+       @ResponseBody
+       @ApiOperation(value = "Datalake Data Exposure Service.")
+       public HashMap<String, Object> serve(@PathVariable String serviceId, @RequestBody Map<String, String> requestMap,
+                       BindingResult bindingResult, HttpServletResponse response) throws IOException, SQLException {
+               log.info("Going to start Datalake Data Exposure Service ... requestMap=" + requestMap);
+               HashMap<String, Object> ret = new HashMap<>();
+               ret.put("request", requestMap);
+               DataExposure dataExposure = dataExposureService.getDataExposure(serviceId);
+               String sqlTemplate = dataExposure.getSqlTemplate();
+               StringSubstitutor sub = new StringSubstitutor(requestMap);
+               String query = sub.replace(sqlTemplate);
+               log.info("Going to start Datalake Data Exposure Service ... query=" + query);
+               // https://prestodb.io/docs/current/installation/jdbc.html
+               String url = String.format("jdbc:presto://dl-presto:8080/%s/%s", dataExposure.getDb().getPrestoCatalog(),
+                               dataExposure.getDb().getDatabase());
+               Properties properties = new Properties();
+               properties.setProperty("user", "test");
+               // properties.setProperty("password", "secret");
+               // properties.setProperty("SSL", "true");
+               Connection connection = DriverManager.getConnection(url, properties);
+               Statement stmt = connection.createStatement();
+               ResultSet rs = stmt.executeQuery(query);
+               ResultSetMetaData meta = rs.getMetaData();
+               int columnCount = meta.getColumnCount();
+               ArrayList<HashMap<String, Object>> result = new ArrayList<>();
+               int count = 0;
+               while (rs.next()) {
+                       HashMap<String, Object> entry = new HashMap<>();
+                       for (int i = 1; i <= columnCount; i++) {
+                               String label = meta.getColumnLabel(i);
+                               Object value = rs.getObject(i);
+                               entry.put(label, value);
+                               log.info(label + "\t" + value);
+                       }
+                       result.add(entry);
+                       count++;
+               }
+               ret.put("result", result);
+               ret.put("result_count", count);
+               return ret;
+       }
+
+    /**
+     * queryAllDataExposure API.
+     *
+        * @return data exposure config list
+     *
+     */
+       @GetMapping("")
+       @ResponseBody
+       @ApiOperation(value = "Datalake Data Exposure list")
+       public List<DataExposureConfig> queryAllDataExposure() {
+               return dataExposureService.queryAllDataExposure();
+       }
+
+    /**
+     * query API.
+     *
+     * @param id id
+     * @param response HttpServletResponse
+        * @return DataExposureConfig
+        * @throws IOException
+     *
+     */
+       @GetMapping("/{id}")
+       @ResponseBody
+       @ApiOperation(value = "Get Detail of DataExposure")
+       public DataExposureConfig queryAllDataExposure(@PathVariable String id, HttpServletResponse response)
+                       throws IOException {
+               log.info("Get Detail of DataExposure Starting.....");
+               DataExposure oldDataExposure = dataExposureService.getDataExposureById(id);
+               if (oldDataExposure == null) {
+                       sendError(response, 400, "DataExposure not found, ID: " + id);
+                       return null;
+               } else {
+                       log.info("ResponseBody......" + oldDataExposure.getDataExposureConfig());
+                       return oldDataExposure.getDataExposureConfig();
+               }
+       }
+
+    /**
+     * delete Kfaka API.
+     *
+     * @param id id
+     * @param response HttpServletResponse
+        * @throws IOException
+     *
+     */
+       @DeleteMapping("/{id}")
+       @ResponseBody
+       @ApiOperation(value = "delete a dataExposure.")
+       public void deleteKafka(@PathVariable String id, HttpServletResponse response) throws IOException {
+               DataExposure oldDataExposure = dataExposureService.getDataExposureById(id);
+               if (oldDataExposure == null) {
+                       sendError(response, 400, "DataExposure not found, ID: " + id);
+               } else {
+                       dataExposureRepository.delete(oldDataExposure);
+                       response.setStatus(204);
+               }
+       }
+
+    /**
+     * Create a DataExposure.
+     *
+     * @param dataExposureConfig dataExposureConfig
+     * @param result BindingResult
+     * @param response HttpServletResponse
+        * @return DataExposureConfig
+        * @throws IOException
+     *
+     */
+       @PostMapping("")
+       @ResponseBody
+       @ApiOperation(value = "Create a DataExposure.")
+       public PostReturnBody<DataExposureConfig> createDataExposure(@RequestBody DataExposureConfig dataExposureConfig,
+                       BindingResult result, HttpServletResponse response) throws IOException {
+               if (result.hasErrors()) {
+                       sendError(response, 400, "Error parsing DataExposureConfig : " + result.toString());
+                       return null;
+               }
+               DataExposure oldDataExposure = dataExposureService.getDataExposureById(dataExposureConfig.getId());
+               if (oldDataExposure != null) {
+                       sendError(response, 400, "DataExposure is exist " + dataExposureConfig.getId());
+                       return null;
+               } else {
+                       DataExposure dataExposure = null;
+                       try {
+                               dataExposure = dataExposureService.fillDataExposureConfiguration(dataExposureConfig);
+                       } catch (Exception e) {
+                               log.debug("FillDataExposureConfiguration failed", e.getMessage());
+                               sendError(response, 400, "Error FillDataExposureConfiguration: " + e.getMessage());
+                               return null;
+                       }
+                       dataExposureRepository.save(dataExposure);
+                       log.info("Kafka save successed");
+                       return mkPostReturnBody(200, dataExposure);
+               }
+       }
+
+    /**
+     * Update a DataExposure.
+     *
+     * @param dataExposureConfig dataExposureConfig
+     * @param result BindingResult
+     * @param id id
+     * @param response HttpServletResponse
+        * @return DataExposureConfig
+        * @throws IOException
+     *
+     */
+       @PutMapping("/{id}")
+       @ResponseBody
+       @ApiOperation(value = "Update a DataExposure.")
+       public PostReturnBody<DataExposureConfig> updateDataExposure(@RequestBody DataExposureConfig dataExposureConfig,
+                       BindingResult result, @PathVariable String id, HttpServletResponse response) throws IOException {
+               if (result.hasErrors()) {
+                       sendError(response, 400, "Error parsing DataExposureConfig : " + result.toString());
+                       return null;
+               }
+               DataExposure oldDataExposure = dataExposureService.getDataExposureById(id);
+               if (oldDataExposure == null) {
+                       sendError(response, 400, "DataExposure not found: " + id);
+                       return null;
+               } else {
+                       try {
+                               dataExposureService.fillDataExposureConfiguration(dataExposureConfig, oldDataExposure);
+                       } catch (Exception e) {
+                               log.debug("FillDataExposureConfiguration failed", e.getMessage());
+                               sendError(response, 400, "Error FillDataExposureConfiguration: " + e.getMessage());
+                               return null;
+                       }
+                       dataExposureRepository.save(oldDataExposure);
+                       log.info("DataExposure update successed");
+                       return mkPostReturnBody(200, oldDataExposure);
+               }
+       }
+
+       private PostReturnBody<DataExposureConfig> mkPostReturnBody(int statusCode, DataExposure dataExposure) {
+               PostReturnBody<DataExposureConfig> retBody = new PostReturnBody<>();
+               retBody.setStatusCode(statusCode);
+               retBody.setReturnBody(dataExposure.getDataExposureConfig());
+               return retBody;
+       }
+
+       private void sendError(HttpServletResponse response, int sc, String msg) throws IOException {
+               log.info(msg);
+               response.sendError(sc, msg);
+       }
+}
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/domain/DataExposure.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/domain/DataExposure.java
new file mode 100644 (file)
index 0000000..c134702
--- /dev/null
@@ -0,0 +1,76 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des.domain;
+
+import com.fasterxml.jackson.annotation.JsonBackReference;
+import lombok.Getter;
+import lombok.Setter;
+import javax.persistence.*;
+
+import org.onap.datalake.des.dto.DataExposureConfig;
+import org.onap.datalake.feeder.domain.Db;
+
+/**
+ * Domain class representing DataExposure
+ *
+ * @author Kai Lu
+ */
@Getter
@Setter
@Entity
@Table(name = "data_exposure")
public class DataExposure {

	// Primary key; used as the {serviceId} path variable by DataExposureController.
	@Id
	@Column(name = "`id`")
	private String id;

	// SQL text with ${...} placeholders, filled per request by StringSubstitutor
	// in DataExposureController.serve().
	@Column(name = "`sql_template`", nullable = false)
	private String sqlTemplate;

	// Optional free-text description of this exposure service.
	@Column(name = "`note`")
	private String note;

	// Target database the query runs against (eagerly fetched).
	// @JsonBackReference keeps the Db side of the relation out of JSON output.
	@ManyToOne(fetch = FetchType.EAGER)
	@JoinColumn(name = "db_id", nullable = false)
	@JsonBackReference
	private Db db;

	// No-arg constructor required by JPA.
	public DataExposure() {
	}

	public DataExposure(String id, String sqlTemplate) {
		this.id = id;
		this.sqlTemplate = sqlTemplate;
	}

    /**
     * getDataExposureConfig.
     *
     * <p>Builds a flat DTO mirror of this entity, with the Db relation reduced
     * to its numeric id.
     *
	 * @return data exposure config
     *
     */
	public DataExposureConfig getDataExposureConfig() {
		DataExposureConfig dataExposureConfig = new DataExposureConfig();
		dataExposureConfig.setId(getId());
		dataExposureConfig.setSqlTemplate(getSqlTemplate());
		dataExposureConfig.setNote(getNote());
		dataExposureConfig.setDbId(getDb().getId());
		return dataExposureConfig;
	}
}
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/dto/DataExposureConfig.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/dto/DataExposureConfig.java
new file mode 100644 (file)
index 0000000..86124f7
--- /dev/null
@@ -0,0 +1,36 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des.dto;
+import lombok.Getter;
+import lombok.Setter;
+/**
+ * JSON request body for DataExposure manipulation.
+ *
+ * @author Kai Lu
+ */
@Getter
@Setter
public class DataExposureConfig {
    // Unique id of the exposure service; primary key of the data_exposure table.
    private String id;
    // Optional free-text description.
    private String note;
    // SQL template with ${...} placeholders filled from the request body at serve time.
    private String sqlTemplate;
    // Id of the Db entity the query runs against (resolved by DataExposureService).
    private Integer dbId;
}
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/repository/DataExposureRepository.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/repository/DataExposureRepository.java
new file mode 100644 (file)
index 0000000..b77e5d2
--- /dev/null
@@ -0,0 +1,36 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des.repository;
+
+import org.onap.datalake.des.domain.DataExposure;
+import org.springframework.data.repository.CrudRepository;
+
+/**
+ * 
+ * DataExposure Repository 
+ * 
+ * @author Kai Lu
+ *
+ */ 
+
public interface DataExposureRepository extends CrudRepository<DataExposure, String> {
    // No custom queries yet: findById/findAll/save/delete are inherited from
    // Spring Data's CrudRepository, keyed by the String id of DataExposure.
}
diff --git a/components/datalake-handler/des/src/main/java/org/onap/datalake/des/service/DataExposureService.java b/components/datalake-handler/des/src/main/java/org/onap/datalake/des/service/DataExposureService.java
new file mode 100644 (file)
index 0000000..c7d642b
--- /dev/null
@@ -0,0 +1,131 @@
+/*
+* ============LICENSE_START=======================================================
+* ONAP : Data Extraction Service
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+
+package org.onap.datalake.des.service;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import org.onap.datalake.des.domain.DataExposure;
+import org.onap.datalake.des.dto.DataExposureConfig;
+import org.onap.datalake.des.repository.DataExposureRepository;
+import org.onap.datalake.feeder.domain.Db;
+import org.onap.datalake.feeder.repository.DbRepository;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+/**
+ * Service for DataExposure
+ *
+ * @author Kai Lu
+ *
+ */
+@Service
+public class DataExposureService {
+
+       private final Logger log = LoggerFactory.getLogger(this.getClass());
+       @Autowired
+       private DataExposureRepository dataExposureRepository;
+       @Autowired
+       private DbRepository dbRepository;
+
+    /**
+     * getDataExposure.
+     *
+     * @param serviceId serviceId
+     *
+        * @return DataExposure
+     *
+     */
+       public DataExposure getDataExposure(String serviceId) {
+               Optional<DataExposure> ret = dataExposureRepository.findById(serviceId);
+               return ret.isPresent() ? ret.get() : null;
+       }
+
+       public List<DataExposureConfig> queryAllDataExposure() {
+               List<DataExposure> dataExposureList = null;
+               List<DataExposureConfig> dataExposureConfigList = new ArrayList<>();
+               dataExposureList = (List<DataExposure>) dataExposureRepository.findAll();
+               if (!dataExposureList.isEmpty()) {
+                       log.info("DataExposureList is not null");
+                       for (DataExposure dataExposure : dataExposureList) {
+                               dataExposureConfigList.add(dataExposure.getDataExposureConfig());
+                       }
+               }
+               return dataExposureConfigList;
+       }
+
+    /**
+     * getDataExposureById.
+     *
+     * @param id id
+     *
+        * @return data exposure
+     *
+     */
+       public DataExposure getDataExposureById(String id) {
+               Optional<DataExposure> ret = dataExposureRepository.findById(id);
+               return ret.isPresent() ? ret.get() : null;
+       }
+
+    /**
+     * fillDataExposureConfiguration.
+     *
+     * @param dataExposureConfig DataExposureConfig
+     *
+        * @return data exposure
+     *
+     */
+       public DataExposure fillDataExposureConfiguration(DataExposureConfig dataExposureConfig) {
+               DataExposure dataExposure = new DataExposure();
+               fillDataExposure(dataExposureConfig, dataExposure);
+               return dataExposure;
+       }
+
+    /**
+     * fillDataExposureConfiguration.
+     *
+     * @param dataExposureConfig DataExposureConfig
+     * @param dataExposure DataExposure
+     *
+        * @return data exposure
+     *
+     */
+       public void fillDataExposureConfiguration(DataExposureConfig dataExposureConfig, DataExposure dataExposure) {
+               fillDataExposure(dataExposureConfig, dataExposure);
+       }
+
+       private void fillDataExposure(DataExposureConfig dataExposureConfig, DataExposure dataExposure)
+                       throws IllegalArgumentException {
+               dataExposure.setId(dataExposureConfig.getId());
+               dataExposure.setNote(dataExposureConfig.getNote());
+               dataExposure.setSqlTemplate(dataExposureConfig.getSqlTemplate());
+               if (dataExposureConfig.getDbId() == null)
+                       throw new IllegalArgumentException("Can not find db_id in db, db_id: " + dataExposureConfig.getDbId());
+               Optional<Db> dbOptional = dbRepository.findById(dataExposureConfig.getDbId());
+               if (!dbOptional.isPresent())
+                       throw new IllegalArgumentException("db_id is null " + dataExposureConfig.getDbId());
+               dataExposure.setDb(dbOptional.get());
+       }
+}
diff --git a/components/datalake-handler/des/src/main/resources/application.properties b/components/datalake-handler/des/src/main/resources/application.properties
new file mode 100644 (file)
index 0000000..c0997e7
--- /dev/null
@@ -0,0 +1,73 @@
+#####################App general
+server.port = 16810
+server.servlet.context-path = /datalake/v1
+
+#tolerate inconsistency when system crash, see PullThread.run()
+async=true
+
+#SSL global flag, if enabled, still need to check each individual DB SSL flag
+enableSSL=false
+
+#names for extra fields that DL adds to each record
+timestampLabel=datalake_ts_
+rawDataLabel=datalake_text_
+
+defaultTopicName=_DL_DEFAULT_
+
+#####################Spring connection to MariaDB for ORM
+#spring.jpa.hibernate.ddl-auto=update
+spring.jpa.hibernate.ddl-auto=none
+spring.jpa.show-sql=false
+
+#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
+spring.datasource.url=jdbc:mariadb://mariadb-galera:3306/datalake?autoReconnect=true&amp;useUnicode=true&amp;characterEncoding=UTF-8
+spring.datasource.username=dl
+spring.datasource.password=dl1234
+
+
+#####################DMaaP
+dmaapZookeeperHostPort=message-router-zookeeper:2181
+dmaapKafkaHostPort=message-router-kafka:9092
+dmaapKafkaGroup=dlgroup44
+#dmaapKafkaLogin=admin
+#dmaapKafkaPass=admin-secret
+#dmaapKafkaSecurityProtocol=SASL_PLAINTEXT
+
+#in second
+dmaapKafkaTimeout=10
+dmaapKafkaExclude[0]=__consumer_offsets
+dmaapKafkaExclude[1]=__transaction_state
+#dmaapKafkaExclude[2]=msgrtr.apinode.metrics.dmaap
+#check for new topics , in millisecond
+dmaapCheckNewTopicInterval=10000
+
+kafkaConsumerCount=3
+
+#####################Elasticsearch
+elasticsearchType=doc
+
+#####################HDFS
+hdfsBufferSize=4096
+#how often we flush stall updates, in millisecond
+hdfsFlushInterval=30000
+hdfsBatchSize=500
+
+#####################Logging
+logging.level.org.springframework.web=ERROR
+logging.level.com.att.nsa.apiClient.http=ERROR
+logging.level.org.onap.datalake=DEBUG
+
+#####################Version
+datalakeVersion=0.0.1
+
+#####################KibanaDashboardImportApi
+kibanaDashboardImportApi=/api/kibana/dashboards/import?exclude=index-pattern
+
+#####################KibanaPort
+kibanaPort=5601
+
+#####################Elasticsearch Template API
+esTemplateMappingApi=/_template/
+
+#####################Elasticsearch port
+esPort=9200
diff --git a/components/datalake-handler/des/src/main/resources/logback.xml b/components/datalake-handler/des/src/main/resources/logback.xml
new file mode 100644 (file)
index 0000000..436f4f0
--- /dev/null
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+<!-- https://logback.qos.ch/manual/layouts.html -->
+       <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+               <layout class="ch.qos.logback.classic.PatternLayout">
+                       <Pattern>
+                               %date |%-5level| [%20.20thread] %-40(%logger:%line) - %msg%n%ex{full}
+                       </Pattern>
+               </layout>
+       </appender>
+
+       <logger name="org.onap.datalake" level="debug"
+               additivity="false">
+               <appender-ref ref="STDOUT" />
+       </logger>
+
+       <root level="error">
+               <appender-ref ref="STDOUT" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/components/datalake-handler/des/src/main/resources/swagger.json b/components/datalake-handler/des/src/main/resources/swagger.json
new file mode 100644 (file)
index 0000000..017f04f
--- /dev/null
@@ -0,0 +1,67 @@
+{
+       "swagger": "2.0",
+       "info": {
+               "description": "This page lists all the rest apis for DataLake.",
+               "version": "1.2.0-SNAPSHOT",
+               "title": "DataLake Exposure Service Rest APIs"
+       },
+       "host": "r-node-1:31157/datalake/v1/",
+       "basePath": "/",
+       "tags": [{
+               "name": "des-controller",
+               "description": "DES Controller"
+       }],
+       "paths": {
+               "/exposure/{serviceId}": {
+                       "post": {
+                               "tags": ["des-controller"],
+                               "summary": "Datalake Data Exposure Service.",
+                               "operationId": "serveUsingPOST",
+                               "consumes": ["application/json"],
+                               "produces": ["application/json"],
+                               "parameters": [{
+                                       "in": "body",
+                                       "name": "requestMap",
+                                       "description": "requestMap",
+                                       "required": true,
+                                       "schema": {
+                                               "type": "object",
+                                               "additionalProperties": {
+                                                       "type": "string"
+                                               }
+                                       }
+                               }, {
+                                       "name": "serviceId",
+                                       "in": "path",
+                                       "description": "serviceId",
+                                       "required": true,
+                                       "type": "string"
+                               }],
+                               "responses": {
+                                       "200": {
+                                               "description": "OK",
+                                               "schema": {
+                                                       "type": "object",
+                                                       "additionalProperties": {
+                                                               "type": "object"
+                                                       }
+                                               }
+                                       },
+                                       "201": {
+                                               "description": "Created"
+                                       },
+                                       "401": {
+                                               "description": "Unauthorized"
+                                       },
+                                       "403": {
+                                               "description": "Forbidden"
+                                       },
+                                       "404": {
+                                               "description": "Not Found"
+                                       }
+                               },
+                               "deprecated": false
+                       }
+               }
+       }
+}
\ No newline at end of file