 * ============LICENSE_START=======================================================
 * ================================================================================
 * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */
package org.openecomp.sdc.asdctool.main;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
import org.openecomp.sdc.be.resources.data.SdcSchemaFilesData;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
49 public class SdcSchemaFileImport {
51 private static final String SEPARATOR = FileSystems.getDefault().getSeparator();
53 private static final String TOSCA_VERSION = "tosca_simple_yaml_1_1";
55 private static String importToscaPath;
57 private static final byte[] buffer = new byte[1024];
59 private static final String YAML_EXTENSION = ".yml";
61 private static final String DEPLOYMENT_TYPE_ONAP = "onap";
63 private static String LICENSE_TXT;
65 private static ZipOutputStream zos;
67 public static void main(String[] args) throws Exception {
69 //Generation flow start - generating SDC from normatives
70 System.out.println("Starting SdcSchemaFileImport procedure...");
71 final String FILE_NAME = "SDC.zip";
73 if (args == null || !(args.length ==4 || args.length == 5 )) {
77 importToscaPath = args[0];
78 String sdcReleaseNum = args[1];
79 String conformanceLevel = args[2];
80 String appConfigDir = args[3];
81 String deploymentType=null;
83 deploymentType=args[4];
87 ByteArrayOutputStream baos = new ByteArrayOutputStream();
89 zos = new ZipOutputStream(baos);
91 //Initialize the license text
93 LICENSE_TXT = new String(Files.readAllBytes(Paths.get(appConfigDir + SEPARATOR+"license.txt")));
96 System.err.println("Couldn't read license.txt in location :" + appConfigDir+", error: "+e);
100 //Loop over schema file list and create each yaml file from /import/tosca folder
101 SchemaZipFileEnum[] schemaFileList = SchemaZipFileEnum.values();
102 for (SchemaZipFileEnum schemaZipFileEnum : schemaFileList) {
104 //get the source yaml file
105 String pathname = importToscaPath + SEPARATOR + schemaZipFileEnum.getSourceFolderName() + SEPARATOR + schemaZipFileEnum.getSourceFileName() + YAML_EXTENSION;
106 System.out.println("Processing file "+pathname+"....");
107 InputStream input = new FileInputStream(new File(pathname));
108 //Convert the content of file to yaml
109 Yaml yamlFileSource = new Yaml();
110 Object content = yamlFileSource.load(input);
112 createAndSaveSchemaFileYaml(schemaZipFileEnum, content);
115 System.err.println("Error in file creation : " + schemaZipFileEnum.getFileName() + ", " + e.getMessage());
120 createAndSaveNodeSchemaFile(deploymentType);
123 //close the ZipOutputStream
125 System.out.println("File SDC.zip creation successful");
127 } catch(Exception ex) {
128 System.err.println("Failed to pack SDC.zip file, error: "+ex);
132 //Generation flow end - generating SDC from narratives
134 AnnotationConfigApplicationContext context = initContext(appConfigDir);
135 SdcSchemaFilesCassandraDao schemaFilesCassandraDao = (SdcSchemaFilesCassandraDao) context.getBean("sdc-schema-files-cassandra-dao");
137 byte[] fileBytes = baos.toByteArray();
139 Date date = new Date();
140 String md5Hex = DigestUtils.md5Hex(fileBytes);
142 SdcSchemaFilesData schemeFileData = new SdcSchemaFilesData(sdcReleaseNum, date, conformanceLevel, FILE_NAME, fileBytes, md5Hex);
143 CassandraOperationStatus saveSchemaFile = schemaFilesCassandraDao.saveSchemaFile(schemeFileData);
145 if(!saveSchemaFile.equals(CassandraOperationStatus.OK)) {
146 System.err.println("SdcSchemaFileImport failed cassandra error" + saveSchemaFile);
150 System.out.println("SdcSchemaFileImport successfully completed");
155 public static void createAndSaveSchemaFileYaml(SchemaZipFileEnum schemaZipFileEnum, Object content) {
156 createAndSaveSchemaFileYaml(schemaZipFileEnum.getFileName(), schemaZipFileEnum.getImportFileList(), schemaZipFileEnum.getCollectionTitle(), content);
159 public static void createAndSaveSchemaFileYaml(String fileName, String[] importFileList, String collectionTitle, Object content) {
161 //Initialize the snake yaml dumper option
162 DumperOptions options = new DumperOptions();
163 options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
165 //Create the new yaml
166 Yaml yaml = new Yaml(options);
167 yaml.setName(fileName);
169 //Initialize the yaml contents
170 Map<String, Object> data = new LinkedHashMap<>();
172 data.put("tosca_definitions_version", TOSCA_VERSION);
174 if (importFileList.length > 0) {
175 data.put("imports", importFileList);
178 data.put(collectionTitle, content);
180 //Save the new yaml to file
184 File file = File.createTempFile(fileName, YAML_EXTENSION);
185 writer = new FileWriter(file);
187 //Add the license as comment in top of file
188 writer.write(LICENSE_TXT);
190 yaml.dump(data, writer);
194 // begin writing a new ZIP entry, positions the stream to the start of the entry data
195 ZipEntry entry = new ZipEntry(yaml.getName() + YAML_EXTENSION);
196 zos.putNextEntry(entry);
197 FileInputStream stream = new FileInputStream(file.getAbsolutePath());
199 while ((len = stream.read(buffer)) > 0) {
200 zos.write(buffer, 0, len);
202 //close the InputStream
208 } catch (IOException e) {
209 System.out.println("Error in file creation : " + fileName + ", " + e.getMessage());
215 *the method is responsible for creating and storing the sdc normatives in the DB
216 * @param deploymentType if the deployments type is onap the onap narratives will be add to the zip
217 * @throws IOException thrown in case of issues in reding files.
219 public static void createAndSaveNodeSchemaFile(String deploymentType) throws IOException {
221 //Initialize the snake yaml dumper option
222 DumperOptions options = new DumperOptions();
223 options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
225 Map<String, Object> nodeTypeList = new LinkedHashMap<>();
227 String[] importFileList = new String[]{"data.yml", "artifacts.yml", "capabilities.yml", "interfaces.yml", "relationships.yml"};
228 String collectionTitle = "node_types";
230 //Create node.yaml - collect all types from normative-types and heat-types directories
231 String[] nodeTypesMainFolders = new String[]{"normative-types", "heat-types"};
233 if(DEPLOYMENT_TYPE_ONAP.equals(deploymentType)){
234 String[] onapNodeTypesMainFolders = new String[]{"nfv-types"};
235 nodeTypesMainFolders=ArrayUtils.addAll(nodeTypesMainFolders,onapNodeTypesMainFolders);
238 for (String nodeTypesMainFolder : nodeTypesMainFolders) {
239 Files.walk(Paths.get(importToscaPath + SEPARATOR + nodeTypesMainFolder))
240 .filter(path -> path.getFileName().toString().toLowerCase().endsWith(YAML_EXTENSION))
241 .forEach(yamlFile -> {
243 String path = yamlFile.toAbsolutePath().toString();
244 System.out.println("Processing node type file "+path+"...");
245 FileInputStream inputStream = new FileInputStream(path);
246 Yaml yaml = new Yaml();
247 Map<String, Object> load = yaml.loadAs(inputStream,Map.class);
248 Map<String, Object> nodeType = (Map<String, Object>) load.get(collectionTitle);
249 nodeTypeList.putAll(nodeType);
251 } catch (Exception e) {
252 System.err.println("Error in opening file " + yamlFile.toAbsolutePath().toString());
257 createAndSaveSchemaFileYaml("nodes", importFileList, collectionTitle, nodeTypeList);
260 private static void usageAndExit() {
261 SdcSchemaFileImportUsage();
265 private static void SdcSchemaFileImportUsage() {
266 System.err.println("Usage: <file dir/filename> <SDC release number> <Schema conformance level> <configuration dir> <deployment type optional>");
269 private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
270 ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
271 new ConfigurationManager(configurationSource);
272 return new AnnotationConfigApplicationContext(EsToCassandraDataMigrationConfig.class);