/*-
 * ============LICENSE_START=======================================================
 * SDC
 * ================================================================================
 * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */
package org.openecomp.sdc.asdctool.main;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.openecomp.sdc.asdctool.configuration.SdcSchemaFileImportConfiguration;
import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
import org.openecomp.sdc.be.resources.data.SdcSchemaFilesData;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
56 public class SdcSchemaFileImport {
58 private static final String SEPARATOR = FileSystems.getDefault().getSeparator();
60 private static final String TOSCA_VERSION = "tosca_simple_yaml_1_1";
62 private static String importToscaPath;
64 private static final byte[] buffer = new byte[1024];
66 private static final String YAML_EXTENSION = ".yml";
68 private static final String DEPLOYMENT_TYPE_ONAP = "onap";
70 private static String LICENSE_TXT;
72 private static ZipOutputStream zos;
74 public static void main(String[] args) throws Exception {
76 //Generation flow start - generating SDC from normatives
77 System.out.println("Starting SdcSchemaFileImport procedure...");
78 final String FILE_NAME = "SDC.zip";
80 if (args == null || !(args.length ==4 || args.length == 5 )) {
84 importToscaPath = args[0];
85 String sdcReleaseNum = args[1];
86 String conformanceLevel = args[2];
87 String appConfigDir = args[3];
88 String deploymentType=null;
90 deploymentType=args[4];
94 ByteArrayOutputStream baos = new ByteArrayOutputStream();
96 zos = new ZipOutputStream(baos);
98 //Initialize the license text
100 LICENSE_TXT = new String(Files.readAllBytes(Paths.get(appConfigDir + SEPARATOR+"license.txt")));
103 System.err.println("Couldn't read license.txt in location :" + appConfigDir+", error: "+e);
107 //Loop over schema file list and create each yaml file from /import/tosca folder
108 SchemaZipFileEnum[] schemaFileList = SchemaZipFileEnum.values();
109 for (SchemaZipFileEnum schemaZipFileEnum : schemaFileList) {
111 //get the source yaml file
112 String pathname = importToscaPath + SEPARATOR + schemaZipFileEnum.getSourceFolderName() + SEPARATOR + schemaZipFileEnum.getSourceFileName() + YAML_EXTENSION;
113 System.out.println("Processing file "+pathname+"....");
114 InputStream input = new FileInputStream(new File(pathname));
115 //Convert the content of file to yaml
116 Yaml yamlFileSource = new Yaml();
117 Object content = yamlFileSource.load(input);
119 createAndSaveSchemaFileYaml(schemaZipFileEnum, content);
122 System.err.println("Error in file creation : " + schemaZipFileEnum.getFileName() + ", " + e.getMessage());
127 createAndSaveNodeSchemaFile(deploymentType);
130 //close the ZipOutputStream
132 System.out.println("File SDC.zip creation successful");
134 } catch(Exception ex) {
135 System.err.println("Failed to pack SDC.zip file, error: "+ex);
139 //Generation flow end - generating SDC from narratives
141 AnnotationConfigApplicationContext context = initContext(appConfigDir);
142 SdcSchemaFilesCassandraDao schemaFilesCassandraDao = (SdcSchemaFilesCassandraDao) context.getBean("sdc-schema-files-cassandra-dao");
144 byte[] fileBytes = baos.toByteArray();
146 Date date = new Date();
147 String md5Hex = DigestUtils.md5Hex(fileBytes);
149 SdcSchemaFilesData schemeFileData = new SdcSchemaFilesData(sdcReleaseNum, date, conformanceLevel, FILE_NAME, fileBytes, md5Hex);
150 CassandraOperationStatus saveSchemaFile = schemaFilesCassandraDao.saveSchemaFile(schemeFileData);
152 if(!saveSchemaFile.equals(CassandraOperationStatus.OK)) {
153 System.err.println("SdcSchemaFileImport failed cassandra error" + saveSchemaFile);
157 System.out.println("SdcSchemaFileImport successfully completed");
162 public static void createAndSaveSchemaFileYaml(SchemaZipFileEnum schemaZipFileEnum, Object content) {
163 createAndSaveSchemaFileYaml(schemaZipFileEnum.getFileName(), schemaZipFileEnum.getImportFileList(), schemaZipFileEnum.getCollectionTitle(), content);
166 public static void createAndSaveSchemaFileYaml(String fileName, String[] importFileList, String collectionTitle, Object content) {
168 //Initialize the snake yaml dumper option
169 DumperOptions options = new DumperOptions();
170 options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
172 //Create the new yaml
173 Yaml yaml = new Yaml(options);
174 yaml.setName(fileName);
176 //Initialize the yaml contents
177 Map<String, Object> data = new LinkedHashMap<>();
179 data.put("tosca_definitions_version", TOSCA_VERSION);
181 if (importFileList.length > 0) {
182 data.put("imports", importFileList);
185 data.put(collectionTitle, content);
187 //Save the new yaml to file
191 File file = File.createTempFile(fileName, YAML_EXTENSION);
192 writer = new FileWriter(file);
194 //Add the license as comment in top of file
195 writer.write(LICENSE_TXT);
197 yaml.dump(data, writer);
201 // begin writing a new ZIP entry, positions the stream to the start of the entry data
202 ZipEntry entry = new ZipEntry(yaml.getName() + YAML_EXTENSION);
203 zos.putNextEntry(entry);
204 FileInputStream stream = new FileInputStream(file.getAbsolutePath());
206 while ((len = stream.read(buffer)) > 0) {
207 zos.write(buffer, 0, len);
209 //close the InputStream
215 } catch (IOException e) {
216 System.out.println("Error in file creation : " + fileName + ", " + e.getMessage());
222 *the method is responsible for creating and storing the sdc normatives in the DB
223 * @param deploymentType if the deployments type is onap the onap narratives will be add to the zip
224 * @throws IOException thrown in case of issues in reding files.
226 public static void createAndSaveNodeSchemaFile(String deploymentType) throws IOException {
228 //Initialize the snake yaml dumper option
229 DumperOptions options = new DumperOptions();
230 options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
232 Map<String, Object> nodeTypeList = new LinkedHashMap<>();
234 String[] importFileList = new String[]{"data.yml", "artifacts.yml", "capabilities.yml", "interfaces.yml", "relationships.yml"};
235 String collectionTitle = "node_types";
237 //Create node.yaml - collect all types from normative-types and heat-types directories
238 String[] nodeTypesMainFolders = new String[]{"normative-types", "heat-types"};
240 if(DEPLOYMENT_TYPE_ONAP.equals(deploymentType)){
241 String[] onapNodeTypesMainFolders = new String[]{"nfv-types"};
242 nodeTypesMainFolders=ArrayUtils.addAll(nodeTypesMainFolders,onapNodeTypesMainFolders);
245 for (String nodeTypesMainFolder : nodeTypesMainFolders) {
246 try (Stream<Path> paths = Files.walk(Paths.get(importToscaPath + SEPARATOR + nodeTypesMainFolder))) {
247 paths.filter(path -> path.getFileName().toString().toLowerCase().endsWith(YAML_EXTENSION))
248 .forEach(yamlFile -> {
250 String path = yamlFile.toAbsolutePath().toString();
251 System.out.println("Processing node type file " + path + "...");
252 FileInputStream inputStream = new FileInputStream(path);
253 Yaml yaml = new Yaml();
254 Map<String, Object> load = yaml.loadAs(inputStream, Map.class);
255 Map<String, Object> nodeType = (Map<String, Object>) load.get(collectionTitle);
256 nodeTypeList.putAll(nodeType);
258 } catch (Exception e) {
259 System.err.println("Error in opening file " + yamlFile.toAbsolutePath().toString());
265 createAndSaveSchemaFileYaml("nodes", importFileList, collectionTitle, nodeTypeList);
268 private static void usageAndExit() {
269 SdcSchemaFileImportUsage();
273 private static void SdcSchemaFileImportUsage() {
274 System.err.println("Usage: <file dir/filename> <SDC release number> <Schema conformance level> <configuration dir> <deployment type optional>");
277 private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
278 ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
279 new ConfigurationManager(configurationSource);
280 return new AnnotationConfigApplicationContext(SdcSchemaFileImportConfiguration.class);