Docker-compose for heat integration lab
author ac2550 <ac2550@intl.att.com>
Thu, 11 Oct 2018 07:27:15 +0000 (09:27 +0200)
committer ac2550 <ac2550@intl.att.com>
Thu, 11 Oct 2018 10:04:08 +0000 (12:04 +0200)
Issue-ID: CLAMP-230
Change-Id: If825138e82f814e4c4804001f99da807d87f27c9
Signed-off-by: ac2550 <ac2550@intl.att.com>
extra/docker/heat/clamp.env [new file with mode: 0644]
extra/docker/heat/docker-compose.yml [new file with mode: 0644]
src/main/docker/kibana/Dockerfile.kibana
src/main/docker/kibana/startup.sh

diff --git a/extra/docker/heat/clamp.env b/extra/docker/heat/clamp.env
new file mode 100644 (file)
index 0000000..abca267
--- /dev/null
@@ -0,0 +1,2 @@
+### Be careful, this must stay on a single line ###
+SPRING_APPLICATION_JSON={"spring.datasource.cldsdb.url":"jdbc:mariadb:sequential://db:3306/cldsdb4?autoReconnect=true&connectTimeout=10000&socketTimeout=10000&retriesAllDown=3","clamp.config.policy.pdpUrl1":"https://policy.api.simpledemo.onap.org:8081/pdp/ , testpdp, alpha123","clamp.config.policy.pdpUrl2":"https://policy.api.simpledemo.onap.org:8081/pdp/ , testpdp, alpha123","clamp.config.policy.papUrl":"https://policy.api.simpledemo.onap.org:9091/pap/ , testpap, alpha123"}
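Note: Spring Boot reads SPRING_APPLICATION_JSON as a block of externalized configuration, and docker-compose env_file entries cannot span multiple lines, hence the single-line constraint above. A quick way to check that the value reached the container intact, sketched below, assumes the "clamp" service name defined in the compose file that follows and a host with docker-compose installed:

    # print the variable exactly as the running clamp container sees it
    docker-compose exec clamp env | grep SPRING_APPLICATION_JSON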
diff --git a/extra/docker/heat/docker-compose.yml b/extra/docker/heat/docker-compose.yml
new file mode 100644 (file)
index 0000000..109a036
--- /dev/null
@@ -0,0 +1,84 @@
+version: '2'
+
+services:
+  db:
+    image: mariadb:10.1.11
+    volumes:
+      - "/var/lib/mysql"
+      - "../mariadb/conf1:/etc/mysql/conf.d:ro"
+      - "../../sql/:/docker-entrypoint-initdb.d:ro"
+    environment:
+      - MYSQL_ROOT_PASSWORD=strong_pitchou
+    ports:
+      - "3306:3306"
+    networks:
+      clamp_net:
+
+  clamp:
+    image: onap/clamp:latest
+    volumes:
+      - "./config/:/opt/clamp/config:rw"
+    depends_on:
+      - db
+    env_file:
+      - clamp.env
+    ports:
+      - "8080:8080"
+      - "8443:8443"
+    networks:
+      clamp_net:
+
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.1.3
+    ports:
+      - 9200:9200
+    networks:
+      cldash_net:
+        aliases:
+          - elasticsearch
+    environment:
+      - cluster.name=docker-cluster
+      - bootstrap.memory_lock=false
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+
+  logstash:
+    image: onap/clamp-dashboard-logstash:latest
+    build:
+      context: ../../../src/main/docker/logstash
+      dockerfile: Dockerfile.logstash
+    # volumes:
+    #   - ../../../src/main/docker/logstash/pipeline:/usr/share/logstash/pipeline
+    #   - ./logstash-input:/log-input
+    depends_on:
+      - elasticsearch
+    networks:
+      cldash_net:
+    environment:
+      - elasticsearch_host=https://elasticsearch:9200/
+      - dmaap_base_url=https://ueb.api.simpledemo.onap.org:3905/
+      # - dmaap_user=user
+      # - dmaap_password=password
+      - dmaap_consumer_group=clampdashboard
+      - dmaap_consumer_id=clampdashboard
+      - event_topic=DCAE-CL-EVENT
+      - notification_topic=POLICY-CL-MGT
+      - request_topic=APPC-CL
+      - elasticsearch_base_url=elasticsearch
+
+  kibana:
+    image: onap/clamp-dashboard-kibana:latest
+    build:
+      context: ../../../src/main/docker/kibana
+      dockerfile: Dockerfile.kibana
+    ports:
+      - 5601:5601
+    depends_on:
+      - elasticsearch
+    # volumes:
+    #   - ../../../src/main/docker/kibana/saved-objects/:/saved-objects/
+    networks:
+      cldash_net:
+
+networks:
+  cldash_net:
+  clamp_net:
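Typical usage of this lab, as a sketch only, assumes the commands are run from extra/docker/heat/ and that onap/clamp:latest is already available locally or pullable; the dashboard images are built locally from src/main/docker:

    cd extra/docker/heat
    docker-compose build logstash kibana   # build the dashboard images locally
    docker-compose up -d db                # give mariadb time to run the init scripts from ../../sql
    docker-compose up -d                   # then start clamp, elasticsearch, logstash and kibana
    docker-compose logs -f clamp           # CLAMP is exposed on 8080 (http) and 8443 (https)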
diff --git a/src/main/docker/kibana/Dockerfile.kibana b/src/main/docker/kibana/Dockerfile.kibana
index 993edd4..6df0204 100644 (file)
@@ -47,7 +47,7 @@ RUN yum install -y python-requests && yum clean all
 #         ├── visualization-cb896270-c190-11e8-a550-27f2e3138fee.json
 #         └── visualization-d837b120-c190-11e8-a550-27f2e3138fee.json
 
-RUN mkdir /saved-objects/
+RUN mkdir /saved-objects/ && chown kibana:kibana /saved-objects/
 
 USER kibana
 
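The chown is needed because the image drops privileges to the kibana user (USER kibana) and the updated startup.sh below now writes a marker file under /saved-objects/. A sketch for verifying the ownership in a built image, using the image name from the compose file above:

    docker run --rm --entrypoint ls onap/clamp-dashboard-kibana:latest -ld /saved-objects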
diff --git a/src/main/docker/kibana/startup.sh b/src/main/docker/kibana/startup.sh
index bbd9d45..a232706 100755 (executable)
@@ -30,9 +30,25 @@ LOG_FILE="/tmp/load.kibana.log"
 KIBANA_LOAD_CMD="/usr/local/bin/kibana-docker -H 127.0.0.1 -l $LOG_FILE"
 TIMEOUT=60
 WAIT_TIME=2
+LOADED_FLAG=$SAVED_OBJECTS_ROOT/.loaded
 
-if [ -n "$(ls -A ${SAVED_OBJECTS_PATH})" ];
+if [ -f $LOADED_FLAG ];
 then
+    echo "---- Kibana saved objects already restored. Remove $LOADED_FLAG if you want to restore them again."
+elif [ -n "$(ls -A ${SAVED_OBJECTS_PATH})" ];
+then
+    echo "---- Waiting for elasticsearch to be up..."
+    RES=-1
+    PING_TIMEOUT=60
+    elastic_url=$(grep elasticsearch.url /usr/share/kibana/config/kibana.yml | cut -d\  -f2)
+    while [ ! "$RES" -eq "0" ] && [ "$PING_TIMEOUT" -gt "0" ];
+    do
+        curl $elastic_url
+        RES=$?
+        sleep $WAIT_TIME
+        let PING_TIMEOUT=$PING_TIMEOUT-$WAIT_TIME
+    done
+
     echo "---- Saved objects found, restoring files."
 
     $KIBANA_LOAD_CMD &
@@ -62,10 +78,19 @@ then
     # restore files
     for saved_objects_path in $SAVED_OBJECTS_ROOT/*
     do
+        # skip files as we only need directories
+        [ -f $saved_objects_path ] && continue
+
         echo "Restoring content of $saved_objects_path"
         $RESTORE_CMD -C $saved_objects_path
         sleep 1
     done
+    
+    touch $LOADED_FLAG
+    if [ "$?" != "0" ];
+    then
+        echo "WARNING: Could not save $LOADED_FLAG, saved objects will be restored on next startup." >&2
+    fi
 
     # cleanup
     kill $KIB_PID
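With these changes the restore runs only once: the script waits until elasticsearch answers, imports the saved-object archives, then touches the .loaded marker so later restarts skip the import. To force a re-import, remove the marker and restart; the sketch below assumes SAVED_OBJECTS_ROOT resolves to /saved-objects/ (as the Dockerfile change suggests) and uses the "kibana" service name from the compose file above:

    docker-compose exec kibana rm /saved-objects/.loaded
    docker-compose restart kibana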