Add des rst 70/114870/1
author Kai <lukai@chinamobile.com>
Fri, 13 Nov 2020 09:03:52 +0000 (17:03 +0800)
committer Kai <lukai@chinamobile.com>
Fri, 13 Nov 2020 09:05:51 +0000 (17:05 +0800)
Issue-ID: DCAEGEN2-2507
Signed-off-by: Kai Lu <lukai@chinamobile.com>
Change-Id: I63161a3d2f7364c3689b93de25c4251c6e06d7c5

13 files changed:
docs/sections/apis/des_api.json [new file with mode: 0644]
docs/sections/apis/des_api.rst [new file with mode: 0644]
docs/sections/offeredapis.rst
docs/sections/release-notes.rst
docs/sections/services/datalake-handler/DL-DES.PNG
docs/sections/services/datalake-handler/arch.PNG
docs/sections/services/datalake-handler/dbschema.PNG
docs/sections/services/datalake-handler/des-arch.PNG [new file with mode: 0644]
docs/sections/services/datalake-handler/images/blueprint-list.png
docs/sections/services/datalake-handler/images/des-log.png [new file with mode: 0644]
docs/sections/services/datalake-handler/index.rst
docs/sections/services/datalake-handler/installation.rst
docs/sections/services/datalake-handler/overview.rst

diff --git a/docs/sections/apis/des_api.json b/docs/sections/apis/des_api.json
new file mode 100644 (file)
index 0000000..0db3b4d
--- /dev/null
@@ -0,0 +1,67 @@
+{
+    "swagger": "2.0",
+    "info": {
+        "description": "This page lists all the rest apis for DataLake.",
+        "version": "1.1.0",
+        "title": "DataLake Extraction Service Rest APIs"
+    },
+    "host": "r-node-1:31157/datalake/v1/",
+    "basePath": "/",
+    "tags": [{
+        "name": "des-controller",
+        "description": "DES Controller"
+    }],
+    "paths": {
+        "/exposure/{serviceId}": {
+            "post": {
+                "tags": ["des-controller"],
+                "summary": "Datalake Data Extraction Service.",
+                "operationId": "serveUsingPOST",
+                "consumes": ["application/json"],
+                "produces": ["application/json"],
+                "parameters": [{
+                    "in": "body",
+                    "name": "requestMap",
+                    "description": "requestMap",
+                    "required": true,
+                    "schema": {
+                        "type": "object",
+                        "additionalProperties": {
+                            "type": "string"
+                        }
+                    }
+                }, {
+                    "name": "serviceId",
+                    "in": "path",
+                    "description": "serviceId",
+                    "required": true,
+                    "type": "string"
+                }],
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "type": "object",
+                            "additionalProperties": {
+                                "type": "object"
+                            }
+                        }
+                    },
+                    "201": {
+                        "description": "Created"
+                    },
+                    "401": {
+                        "description": "Unauthorized"
+                    },
+                    "403": {
+                        "description": "Forbidden"
+                    },
+                    "404": {
+                        "description": "Not Found"
+                    }
+                },
+                "deprecated": false
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/docs/sections/apis/des_api.rst b/docs/sections/apis/des_api.rst
new file mode 100644 (file)
index 0000000..57e68ca
--- /dev/null
@@ -0,0 +1,28 @@
+.. This work is licensed under a
+   Creative Commons Attribution 4.0 International License.
+
+=================================
+DES (DataLake Extraction Service)
+=================================
+
+:Date: 2020-11-11
+
+.. contents::
+    :depth: 3
+
+Overview
+========
+
+Component description is included in `DES`_.
+
+.. _DES: ../../sections/services/datalake-handler/index.html
+
+Offered APIs
+============
+
+.. csv-table::
+  :header: "API name", "Swagger JSON"
+  :widths: 10,5
+
+  "DES API", ":download:`link <des_api.json>`"
+
index 07ec012..0ab3a93 100644 (file)
@@ -19,3 +19,4 @@ Offered APIs
    apis/PMSH.rst
    apis/SDK.rst
    apis/mod-onboardingapi.rst
+   apis/des_api.rst
index 1380382..0b908c5 100644 (file)
@@ -73,7 +73,7 @@ Below service components (mS) are available to be deployed on-demand.
        - PM-Subscription Handler
        - DataLake Handler (Admin and Feeder)
        - Slice Analysis
-       - Data Extraction Service
+       - DataLake Extraction Service
 
     Notes:
 
@@ -129,7 +129,7 @@ New features
 - Following new services are delivered this release
 
     - Event Processors
-        - DataExposure Service 
+        - DataLake Extraction Service 
        
     - Analytics/RCA
         - Slice Analysis MS
index aa3c494..ca3f419 100644 (file)
Binary files a/docs/sections/services/datalake-handler/DL-DES.PNG and b/docs/sections/services/datalake-handler/DL-DES.PNG differ
index eb809c6..e61c89e 100644 (file)
Binary files a/docs/sections/services/datalake-handler/arch.PNG and b/docs/sections/services/datalake-handler/arch.PNG differ
index 14c0d93..a9268b8 100644 (file)
Binary files a/docs/sections/services/datalake-handler/dbschema.PNG and b/docs/sections/services/datalake-handler/dbschema.PNG differ
diff --git a/docs/sections/services/datalake-handler/des-arch.PNG b/docs/sections/services/datalake-handler/des-arch.PNG
new file mode 100644 (file)
index 0000000..02ffd64
Binary files /dev/null and b/docs/sections/services/datalake-handler/des-arch.PNG differ
index e934205..7e44c72 100644 (file)
Binary files a/docs/sections/services/datalake-handler/images/blueprint-list.png and b/docs/sections/services/datalake-handler/images/blueprint-list.png differ
diff --git a/docs/sections/services/datalake-handler/images/des-log.png b/docs/sections/services/datalake-handler/images/des-log.png
new file mode 100644 (file)
index 0000000..128a749
Binary files /dev/null and b/docs/sections/services/datalake-handler/images/des-log.png differ
index 3b445a5..7fee00c 100644 (file)
@@ -9,6 +9,7 @@ DataLake-Handler MS
 It has a Admin UI, where a system administrator configures which Topics to be monitored, and to which data storage to store the data. 
 It is also used to manage the settings of the storage and associated data analytics tool. 
 The second part is the Feeder, which does the data transfer work and is horizontal scalable. 
+The third part is the Data Extraction Service (DES), which exposes the data in the data storage via REST APIs for other ONAP components and external systems to consume.
 
 .. image:: DL-DES.PNG
 
index 16294b9..5ceca35 100644 (file)
@@ -1,11 +1,31 @@
 Deployment Steps
 ################
-DL-handler consists of two pods- the feeder and admin UI. It can be deployed by using cloudify blueprint. Datalake can be easily deployed through DCAE cloudify manager. The following steps guides you launch Datalake though cloudify manager.
+DL-handler consists of three pods - the feeder, admin UI and DES. It can be deployed by using a cloudify blueprint. Datalake can be easily deployed through the DCAE cloudify manager. The following steps guide you to launch Datalake through the cloudify manager.
 
 Pre-requisite
 -------------
-- Make sure mariadb-galera from OOM is properly deployed and functional.
-- An external database, such as Elasticsearch and MongoDB is deployed.
+Make sure mariadb-galera from OOM is properly deployed and functional.
+An external database, such as Elasticsearch or MongoDB, is deployed. Install MongoDB through the following command.
+
+     #docker run -itd --restart=always --name dl-mongo -p 27017:27017 mongo
+
+For DES service deployment, the presto service must also be deployed. Here is a sample of how presto is deployed in the environment.
+    Build a presto image:
+      The presto package version we are using is v0.0.2: presto-v0.0.2.tar.gz
+
+        #docker build -t presto:v0.0.2 .
+        #docker tag presto:v0.0.2 registry.baidubce.com/onap/presto:v0.0.2
+        #docker push registry.baidubce.com/onap/presto:v0.0.2
+
+    Note: Replace the repository path with your own repository. 
+    
+    Install presto service:
+
+        #kubectl -n onap run dl-presto --image=registry.baidubce.com/onap/presto:v0.0.2 --env="MongoDB_IP=192.168.235.11" --env="MongoDB_PORT=27017" 
+        #kubectl -n onap expose deployment dl-presto --port=9000 --target-port=9000 --type=NodePort
+
+    Note: You can replace MongoDB_IP and MongoDB_PORT with the two values from your own configuration.
 
 After datalake getting deployed, the admin UI can be used to configure the sink database address and credentials.
 
@@ -21,20 +41,22 @@ Login to the DCAE bootstrap pod through the following command.
      #kubectl exec -it <DCAE bootstrap pod> /bin/bash -n onap
 
 Validate Blueprint
--------------------
-Before the blueprints uploading to Cloudify manager, the blueprints shoule be validated first throuhg the following command.
+------------------
+Before uploading the blueprints to the Cloudify manager, the blueprints should be validated first through the following command.
   .. code-block :: bash
 
     #cfy blueprint validate /bluerints/k8s-datalake-feeder.yaml
     #cfy blueprint validate /blueprints/k8s-datalake-admin-ui.yaml
+    #cfy blueprint validate /blueprints/k8s-datalake-des.yaml
 
 Upload the Blueprint to Cloudify Manager.
 -----------------------------------------
 After validating, we can start to proceed blueprints uploading.
   .. code-block :: bash
 
-     #cfy blueprint upload -b datalake-feeder /bluerints/k8s-datalake-feeder.yaml
-     #cfy blueprint upload -b datalake-admin-ui /blueprints/k8s-datalake-admin-ui.yaml
+     #cfy blueprint upload -b dl-feeder /blueprints/k8s-datalake-feeder.yaml
+     #cfy blueprint upload -b dl-admin-ui /blueprints/k8s-datalake-admin-ui.yaml
+     #cfy blueprint upload -b des /blueprints/k8s-datalake-des.yaml
 
 Verify Uploaded Blueprints
 --------------------------
@@ -59,20 +81,22 @@ Create Deployment
 Here we are going to create deployments for both feeder and admin UI.
   .. code-block :: bash
 
-     #cfy deployments create -b datalake-feeder feeder-deploy
-     #cfy deployments create -b datalake-admin-ui admin-ui-deploy
+     #cfy deployments create -b dl-feeder feeder-deploy
+     #cfy deployments create -b dl-admin-ui admin-ui-deploy
+     #cfy deployments create -b des des
 
 Launch Service
----------------
+--------------
 Next, we are going to launch the datalake.
   .. code-block :: bash
 
      #cfy executions start -d feeder-deploy install
      #cfy executions start -d admin-ui-deploy install
+     #cfy executions start -d des install
 
 
 Verify the Deployment Result
------------------------------
+----------------------------
 The following command can be used to list the datalake logs.
 
   .. code-block :: bash
@@ -82,21 +106,26 @@ The following command can be used to list the datalake logs.
 The output should looks like.
     .. image :: ./images/feeder-log.png
 
+The DES output should look like.
+    .. image :: ./images/des-log.png
+
 If you find any Java exception from log, make sure that the external database and datalake configuration are properly configured.
 Admin UI can be used to configure the external database configuration.
 
 
 Uninstall
-----------
+---------
 Uninstall running component and delete deployment
   .. code-block :: bash
 
      #cfy uninstall feeder-deploy
      #cfy uninstall admin-ui-deploy
+     #cfy uninstall des
 
 Delete Blueprint
-------------------
+----------------
   .. code-block :: bash
 
-     #cfy blueprints delete datalake-feeder
-     #cfy blueprints delete datalake-admin-ui
+     #cfy blueprints delete dl-feeder
+     #cfy blueprints delete dl-admin-ui
+     #cfy blueprints delete des
index 09e41a5..101cb46 100644 (file)
@@ -39,7 +39,7 @@ without interacting with DataLake Handler.
 \r
 Description\r
 ~~~~~~~~~~~\r
-DataLate Handler's main function is to monitor and persist data flow through DMaaP. The databases are outside of ONAP scope, \r
+DataLake Handler's main function is to monitor and persist data flow through DMaaP and provide a query API for other component or external services. The databases are outside of ONAP scope, \r
 since the data is expected to be huge, and a database may be a complicated cluster consisting of thousand of nodes.\r
 \r
 Admin UI\r
@@ -67,8 +67,18 @@ Features
    - Support data processing features. Before persisting data, data can be massaged in Feeder. Currently two features are implemented: Correlate Cleared Message (in org.onap.datalake.feeder.service.db.ElasticsearchService)  and Flatten JSON Array (org.onap.datalake.feeder.service.StoreService).\r
    - Connection to Kafka and DBs are secured\r
 \r
+DES\r
+~~~\r
+Architecture\r
+\r
+.. image:: ./des-arch.PNG\r
+\r
+Features\r
+\r
+   - Provide a data query API for other components to consume.\r
+   - Integrate with Presto to do data query via sql template.\r
 \r
 Links\r
 ~~~~~\r
    - DataLake Development Environment Setup https://wiki.onap.org/display/DW/DataLake+Development+Environment+Setup\r
+   - DES description and deployment steps: https://wiki.onap.org/display/DW/DES\r
    - Source Code https://gerrit.onap.org/r/gitweb?p=dcaegen2/services.git;a=tree;f=components/datalake-handler;hb=HEAD\r