import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
import org.openecomp.sdc.be.data.model.ToscaImportByModel;
+import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.OperationDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.DumperOptions.FlowStyle;
import org.yaml.snakeyaml.Yaml;
/**
// Path of the TOSCA metadata file inside the generated CSAR archive.
private static final String TOSCA_META_PATH_FILE_NAME = "TOSCA-Metadata/TOSCA.meta";
// Versions written into the TOSCA.meta block-0 header.
private static final String TOSCA_META_VERSION = "1.0";
private static final String CSAR_VERSION = "1.1";
// NOTE(review): the two lines below carry stray '+' diff markers from the original paste — confirm intended content.
// Template for the csar.meta content: meta-file version, then TOSCA definitions version.
+ private static final String BLOCK_0_TEMPLATE = "SDC-TOSCA-Meta-File-Version: %s\nSDC-TOSCA-Definitions-Version: %s\n";
+ private static final String CSAR_META_PATH_FILE_NAME = "csar.meta";
// Application version taken from the external configuration at class-load time.
private static final String SDC_VERSION = ExternalConfiguration.getAppVersion();
public static final String NODES_YML = "nodes.yml";
// TOSCA conformance level resolved from configuration at class-load time.
private static final String CONFORMANCE_LEVEL = ConfigurationManager.getConfigurationManager().getConfiguration().getToscaConformanceLevel();
* @param component the component to create the NS CSAR from
* @return an entry to be added in the Component CSAR by SDC
*/
-
// NOTE(review): this method is a truncated diff fragment — the parameter list and parts of the
// body are missing, and several lines still carry '+' / '-' diff markers. Comments below only
// describe what the visible lines demonstrably do; resolve the diff before relying on them.
public Either<ZipOutputStream, ResponseFormat> generateCsarZip(Component component,
boolean getFromCS,
ZipOutputStream zip,
} else {
// Propagate the failure from fetching the TOSCA representation.
return Either.right(toscaRepresentation.right().value());
}
+
// When imports are not skipped, write a csar.meta entry describing meta-file and conformance versions.
+ if (!isSkipImports) {
+ final String toscaConformanceLevel = ConfigurationManager.getConfigurationManager().getConfiguration().getToscaConformanceLevel();
+ zip.putNextEntry(new ZipEntry(CSAR_META_PATH_FILE_NAME));
+ zip.write(createCsarBlock0(TOSCA_META_VERSION, toscaConformanceLevel).getBytes());
+ }
+
final String fileName = artifactDef.getArtifactName();
- final byte[] toscaBlock0Byte =
- createToscaBlock0(TOSCA_META_VERSION, CSAR_VERSION, component.getCreatorFullName(), fileName, isAsdPackage, definitionsPath).getBytes();
// Write the TOSCA.meta block-0 followed by the main template YAML under the definitions path.
+ final byte[] toscaBlock0Byte = createToscaBlock0(
+ TOSCA_META_VERSION, CSAR_VERSION, component.getCreatorFullName(), fileName, isAsdPackage, definitionsPath, isSkipImports).getBytes();
zip.putNextEntry(new ZipEntry(TOSCA_META_PATH_FILE_NAME));
zip.write(toscaBlock0Byte);
zip.putNextEntry(new ZipEntry(definitionsPath + fileName));
zip.write(mainYaml);
LifecycleStateEnum lifecycleState = component.getLifecycleState();
- addServiceMf(component, zip, lifecycleState, isInCertificationRequest, fileName, mainYaml, definitionsPath);
if (addDependencies) {
// NOTE(review): the diff moves addServiceMf inside the addDependencies branch — confirm that is intended.
+ addServiceMf(component, zip, lifecycleState, isInCertificationRequest, fileName, mainYaml, definitionsPath);
//US798487 - Abstraction of complex types
if (hasToWriteComponentSubstitutionType(component)) {
LOGGER.debug("Component {} is complex - generating abstract type for it..", component.getName());
addSchemaFilesFromCassandra(zip, schemaFileZip, nodesFromPackage, definitionsPath);
} else {
//retrieve schema files by model from Cassandra
- addSchemaFilesByModel(zip, component.getModel(), definitionsPath, addDependencies);
// The new overload also receives the dependency components (right element of each triple).
+ addSchemaFilesByModel(zip, component.getModel(), definitionsPath, addDependencies,
+ dependencies.stream().map(d -> d.getRight()).collect(Collectors.toList()));
}
Either<CsarDefinition, ResponseFormat> collectedComponentCsarDefinition = collectComponentCsarDefinition(component);
if (collectedComponentCsarDefinition.isRight()) {
return writeAllFilesToCsar(component, collectedComponentCsarDefinition.left().value(), zip, isInCertificationRequest);
}
+ private String createCsarBlock0(String metaFileVersion, String toscaConformanceLevel) {
+ return String.format(BLOCK_0_TEMPLATE, metaFileVersion, toscaConformanceLevel);
+ }
+
// NOTE(review): truncated diff fragment — the signature below is cut off and the loop that
// follows appears to belong to a different method (it iterates dependency triples). Comments
// describe only what the visible lines do; resolve the diff before relying on them.
private Either<ToscaRepresentation, ResponseFormat> fetchToscaRepresentation(Component component, boolean getFromCS,
ArtifactDefinition artifactDef, boolean isSkipImports) {
LifecycleStateEnum lifecycleState = component.getLifecycleState();
// For each dependency triple: middle = cassandra artifact id, right = the child component.
for (Triple<String, String, Component> d : dependencies) {
String cassandraId = d.getMiddle();
Component childComponent = d.getRight();
- Either<byte[], ResponseFormat> entryData = getEntryData(cassandraId, childComponent).right()
- .map(componentsUtils::getResponseFormat);
// Fetch the artifact bytes; on failure map the status to a ResponseFormat.
+ Either<byte[], ResponseFormat> entryData = getEntryData(cassandraId, childComponent).right().map(componentsUtils::getResponseFormat);
if (entryData.isRight()) {
return Either.right(entryData.right().value());
}
// NOTE(review): truncated diff fragment — the enclosing method signature is cut off above and
// the try body continues past this view; '+' / '-' lines are an unresolved re-formatting hunk.
final String definitionsPath) {
// Initial buffer size for the copy between the schema zip and the CSAR zip.
final int initSize = 2048;
LOGGER.debug("Starting copy from Schema file zip to CSAR zip");
- try (final ZipInputStream zipInputStream = new ZipInputStream(new ByteArrayInputStream(
- schemaFileZip)); final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(
- byteArrayOutputStream, initSize)) {
// try-with-resources: all three streams are closed automatically on exit.
+ try (final ZipInputStream zipInputStream = new ZipInputStream(new ByteArrayInputStream(schemaFileZip));
+ final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+ final BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(byteArrayOutputStream, initSize)) {
ZipEntry entry;
while ((entry = zipInputStream.getNextEntry()) != null) {
// Guard against zip-slip (entries whose path escapes the extraction root).
ZipUtils.checkForZipSlipInRead(entry);
}
// Writes the model's default TOSCA imports into the CSAR under the definitions path.
// NOTE(review): this body contains unresolved diff hunks (duplicated else-branches around the
// writtenEntryPathList check, '+' / '-' markers, and apparently unbalanced braces). Comments
// describe only what the visible lines demonstrably do; resolve the diff before relying on them.
private void addSchemaFilesByModel(final ZipOutputStream zipOutputStream, final String modelName,
- final String definitionsPath, final boolean isSingleImportsFile) {
+ final String definitionsPath, final boolean isSingleImportsFile,
+ final List<Component> dependencies) {
try {
- final List<ToscaImportByModel> modelDefaultImportList = modelOperation.findAllModelImports(modelName, true);
// Tracks entry paths already written so duplicates get a model-id file prefix.
final Set<Path> writtenEntryPathList = new HashSet<>();
- final var defsPath = Path.of(definitionsPath);
- Map<Path, byte[]> contentToMerge = new HashMap<>();
+ final Path defsPath = Path.of(definitionsPath);
+ final Map<Path, byte[]> contentToMerge = new HashMap<>();
+ final List<ToscaImportByModel> modelDefaultImportList = modelOperation.findAllModelImports(modelName, true);
for (final ToscaImportByModel toscaImportByModel : modelDefaultImportList) {
var importPath = Path.of(toscaImportByModel.getFullPath());
if (!isSingleImportsFile) {
// Multi-imports mode: collect content for later merging instead of writing directly.
contentToMerge.put(entryPath, toscaImportByModel.getContent().getBytes(StandardCharsets.UTF_8));
} else {
if (writtenEntryPathList.contains(defsPath.resolve(importPath))) {
- importPath =
- ToscaDefaultImportHelper.addModelAsFilePrefix(importPath, toscaImportByModel.getModelId());
// Disambiguate colliding entry paths by prefixing the file name with the model id.
+ importPath = ToscaDefaultImportHelper.addModelAsFilePrefix(importPath, toscaImportByModel.getModelId());
}
final Path entryPath = defsPath.resolve(importPath);
writtenEntryPathList.add(entryPath);
}
} else {
if (writtenEntryPathList.contains(defsPath.resolve(importPath))) {
- importPath =
- ToscaDefaultImportHelper.addModelAsFilePrefix(importPath, toscaImportByModel.getModelId());
+ importPath = ToscaDefaultImportHelper.addModelAsFilePrefix(importPath, toscaImportByModel.getModelId());
}
final Path entryPath = defsPath.resolve(importPath);
- final var zipEntry = new ZipEntry(entryPath.toString());
- zipOutputStream.putNextEntry(zipEntry);
+ zipOutputStream.putNextEntry(new ZipEntry(entryPath.toString()));
writtenEntryPathList.add(entryPath);
final byte[] content = toscaImportByModel.getContent().getBytes(StandardCharsets.UTF_8);
zipOutputStream.write(content, 0, content.length);
// Accumulate content destined for the additional-type-definitions file; write everything else directly.
byte[] mergingContent = new byte[0];
for (Map.Entry<Path, byte[]> entry : contentToMerge.entrySet()) {
if (ADDITIONAL_TYPE_DEFINITIONS.equals(Paths.get(String.valueOf(entry.getKey())).normalize().toString())) {
- mergingContent = Bytes.concat(mergingContent, entry.getValue());
+ mergingContent = mergeContent(mergingContent, entry.getValue());
} else {
final var zipEntry = new ZipEntry(entry.getKey().toString());
zipOutputStream.putNextEntry(zipEntry);
writtenEntryPathList.add(entry.getKey());
- final var concat = Bytes.concat(mergingContent, entry.getValue());
- zipOutputStream.write(concat, 0, concat.length);
// Merge the entry into the accumulated content, enrich it from the dependencies, then write it.
+ mergingContent = mergeContent(mergingContent, entry.getValue());
+ mergingContent = updateMergingContentFromDependencies(mergingContent, dependencies);
+ zipOutputStream.write(mergingContent, 0, mergingContent.length);
zipOutputStream.closeEntry();
}
}
}
}
+ private byte[] updateMergingContentFromDependencies(final byte[] mergingContent, final List<Component> dependencies) {
+ final DumperOptions options = new DumperOptions();
+ options.setDefaultFlowStyle(FlowStyle.BLOCK);
+ final Yaml yaml = new Yaml(options);
+ final Map<String, Object> stringObjectMap = (Map<String, Object>) yaml.load(new String(mergingContent));
+ final Map<String, Object> nodeTypes = (Map<String, Object>) stringObjectMap.get("node_types");
+ for (final Component dependency : dependencies) {
+ final Map<String, Object> dependencyAsMap = yaml.load(yaml.dumpAsMap(dependency));
+ final String toscaResourceName = ((ResourceMetadataDataDefinition) dependency.getComponentMetadataDefinition()
+ .getMetadataDataDefinition()).getToscaResourceName();
+ final Map<String, Object> nodeType = (Map<String, Object>) nodeTypes.get(toscaResourceName);
+ final Map<String, Object> propertiesFromDependency = (Map<String, Object>) ((List) dependencyAsMap.get("properties"))
+ .stream().collect(Collectors.toMap(s -> ((Map<String, Object>) s).get("name"), s -> s));
+ if (MapUtils.isNotEmpty(nodeType) && MapUtils.isNotEmpty(propertiesFromDependency)) {
+ final Map<String, Object> propertiesFromMergingContent = (Map<String, Object>) nodeType.get("properties");
+ final Map<String, Object> updatedMap = updatePropertiesFromDependency(propertiesFromMergingContent, propertiesFromDependency);
+ nodeType.replace("properties", updatedMap);
+ nodeTypes.replace(toscaResourceName, nodeType);
+ }
+ }
+ stringObjectMap.replace("node_types", nodeTypes);
+ return yaml.dumpAsMap(stringObjectMap).getBytes();
+ }
+
+ private Map<String, Object> updatePropertiesFromDependency(final Map<String, Object> propertiesFromMergingContent,
+ final Map<String, Object> propertiesFromDependency) {
+ final Map<String, Object> result = new HashMap<>();
+ for (final Entry<String, Object> entry : propertiesFromDependency.entrySet()) {
+ final Map<String, Object> propertiesMap = new HashMap<>();
+ final String key = entry.getKey();
+ final Object value = entry.getValue();
+ if (propertiesFromMergingContent instanceof Map) {
+ final Object object = propertiesFromMergingContent.get(key);
+ if (object instanceof Map) {
+ ((Map<String, Object>) object).keySet().forEach(s ->
+ propertiesMap.put(s, getValue(s, (Map<String, Object>) value))
+ );
+ } else {
+ propertiesMap.putAll(createProperties(value));
+ }
+ } else {
+ propertiesMap.putAll(createProperties(value));
+ }
+ result.put(key, propertiesMap);
+ }
+ return result;
+ }
+
+ private Object getValue(final String key, Map<String, Object> value) {
+ final String mappedKey = mapKey(key);
+ if (mappedKey.equals("schemaType")) {
+ return Collections.singletonMap("type", value.get(mappedKey));
+ }
+ return value.get(mappedKey);
+ }
+
+ private String mapKey(final String key) {
+ if (key.equals("entry_schema")) {
+ return "schemaType";
+ }
+ if (key.equals("default")) {
+ return "defaultValue";
+ }
+ return key;
+ }
+
+ private Map<String, Object> createProperties(final Object value) {
+ final Map<String, Object> propertiesMap = new HashMap<>();
+ propertiesMap.put("type", ((Map<String, Object>) value).get("type"));
+ propertiesMap.put("required", ((Map<String, Object>) value).get("required"));
+ final Object entrySchema = ((Map<String, Object>) value).get("entry_schema");
+ if (entrySchema != null) {
+ propertiesMap.put("entry_schema", entrySchema);
+ }
+ return propertiesMap;
+ }
+
+ private byte[] mergeContent(final byte[] first, final byte[] second) {
+ byte[] merged = new byte[0];
+ final Map<String, Object> firstMap = new Yaml().load(new String(first));
+ final Map<String, Object> secondMap = new Yaml().load(new String(second));
+ if (MapUtils.isNotEmpty(secondMap)) {
+ final DumperOptions options = new DumperOptions();
+ options.setDefaultFlowStyle(FlowStyle.BLOCK);
+ final Yaml yaml = new Yaml(options);
+ for (final Entry<String, Object> secondMapEntry : secondMap.entrySet()) {
+ final Map<String, Object> newMap = new HashMap<>();
+ if (secondMapEntry.getKey().endsWith("_types")) {
+ if (MapUtils.isNotEmpty(firstMap) && firstMap.containsKey(secondMapEntry.getKey())) {
+ final Map<String, Object> secondMapEntryValue = (Map<String, Object>) secondMapEntry.getValue();
+ final Map<String, Object> firstMapValue = (Map<String, Object>) firstMap.get(secondMapEntry.getKey());
+ secondMapEntryValue.putAll(firstMapValue);
+ newMap.put(secondMapEntry.getKey(), secondMapEntryValue);
+ } else {
+ newMap.put(secondMapEntry.getKey(), secondMapEntry.getValue());
+ }
+ } else {
+ newMap.put(secondMapEntry.getKey(), secondMapEntry.getValue());
+ }
+ merged = Bytes.concat(merged, yaml.dumpAsMap(newMap).getBytes());
+ }
+ }
+ return merged;
+ }
+
// NOTE(review): truncated fragment — the body below ends without a return statement, so the
// remainder of this method is missing from this view; only the visible setup is documented.
private Either<CsarDefinition, ResponseFormat> collectComponentCsarDefinition(Component component) {
ComponentArtifacts componentArtifacts = new ComponentArtifacts();
Component updatedComponent = component;
}
private String createToscaBlock0(String metaFileVersion, String csarVersion, String createdBy, String entryDef, boolean isAsdPackage,
- String definitionsPath) {
+ String definitionsPath, boolean isSkipImports) {
final String block0template = "TOSCA-Meta-File-Version: %s\nCSAR-Version: %s\nCreated-By: %s\nEntry-Definitions: "
- + definitionsPath + "%s\n%s\nName: csar.meta\nContent-Type: text/plain\n";
+ + definitionsPath + "%s\n%s\n" + (!isSkipImports ? "Name: csar.meta\nContent-Type: text/plain\n" : "");
return String.format(block0template, metaFileVersion, csarVersion, createdBy, entryDef, isAsdPackage ? "entry_definition_type: asd" : "");
}
}
}
+
}