import java.nio.charset.StandardCharsets;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
import org.openecomp.sdc.be.data.model.ToscaImportByModel;
import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.OperationDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.DumperOptions.FlowStyle;
import org.yaml.snakeyaml.Yaml;
/**
addSchemaFilesFromCassandra(zip, schemaFileZip, nodesFromPackage, definitionsPath);
} else {
//retrieve schema files by model from Cassandra
- addSchemaFilesByModel(zip, component.getModel(), definitionsPath, addDependencies);
+ addSchemaFilesByModel(zip, component.getModel(), definitionsPath, addDependencies,
+ dependencies.stream().map(d -> d.getRight()).collect(Collectors.toList()));
}
Either<CsarDefinition, ResponseFormat> collectedComponentCsarDefinition = collectComponentCsarDefinition(component);
if (collectedComponentCsarDefinition.isRight()) {
for (Triple<String, String, Component> d : dependencies) {
String cassandraId = d.getMiddle();
Component childComponent = d.getRight();
- Either<byte[], ResponseFormat> entryData = getEntryData(cassandraId, childComponent).right()
- .map(componentsUtils::getResponseFormat);
+ Either<byte[], ResponseFormat> entryData = getEntryData(cassandraId, childComponent).right().map(componentsUtils::getResponseFormat);
if (entryData.isRight()) {
return Either.right(entryData.right().value());
}
final String definitionsPath) {
final int initSize = 2048;
LOGGER.debug("Starting copy from Schema file zip to CSAR zip");
- try (final ZipInputStream zipInputStream = new ZipInputStream(new ByteArrayInputStream(
- schemaFileZip)); final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(
- byteArrayOutputStream, initSize)) {
+ try (final ZipInputStream zipInputStream = new ZipInputStream(new ByteArrayInputStream(schemaFileZip));
+ final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+ final BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(byteArrayOutputStream, initSize)) {
ZipEntry entry;
while ((entry = zipInputStream.getNextEntry()) != null) {
ZipUtils.checkForZipSlipInRead(entry);
}
private void addSchemaFilesByModel(final ZipOutputStream zipOutputStream, final String modelName,
- final String definitionsPath, final boolean isSingleImportsFile) {
+ final String definitionsPath, final boolean isSingleImportsFile,
+ final List<Component> dependencies) {
try {
- final List<ToscaImportByModel> modelDefaultImportList = modelOperation.findAllModelImports(modelName, true);
final Set<Path> writtenEntryPathList = new HashSet<>();
- final var defsPath = Path.of(definitionsPath);
- Map<Path, byte[]> contentToMerge = new HashMap<>();
+ final Path defsPath = Path.of(definitionsPath);
+ final Map<Path, byte[]> contentToMerge = new HashMap<>();
+ final List<ToscaImportByModel> modelDefaultImportList = modelOperation.findAllModelImports(modelName, true);
for (final ToscaImportByModel toscaImportByModel : modelDefaultImportList) {
var importPath = Path.of(toscaImportByModel.getFullPath());
if (!isSingleImportsFile) {
zipOutputStream.putNextEntry(zipEntry);
writtenEntryPathList.add(entry.getKey());
mergingContent = mergeContent(mergingContent, entry.getValue());
+ mergingContent = updateMergingContentFromDependencies(mergingContent, dependencies);
zipOutputStream.write(mergingContent, 0, mergingContent.length);
zipOutputStream.closeEntry();
}
}
}
+ private byte[] updateMergingContentFromDependencies(final byte[] mergingContent, final List<Component> dependencies) {
+ final DumperOptions options = new DumperOptions();
+ options.setDefaultFlowStyle(FlowStyle.BLOCK);
+ final Yaml yaml = new Yaml(options);
+ final Map<String, Object> stringObjectMap = (Map<String, Object>) yaml.load(new String(mergingContent));
+ final Map<String, Object> nodeTypes = (Map<String, Object>) stringObjectMap.get("node_types");
+ for (final Component dependency : dependencies) {
+ final Map<String, Object> dependencyAsMap = yaml.load(yaml.dumpAsMap(dependency));
+ final String toscaResourceName = ((ResourceMetadataDataDefinition) dependency.getComponentMetadataDefinition()
+ .getMetadataDataDefinition()).getToscaResourceName();
+ final Map<String, Object> nodeType = (Map<String, Object>) nodeTypes.get(toscaResourceName);
+ final Map<String, Object> propertiesFromDependency = (Map<String, Object>) ((List) dependencyAsMap.get("properties"))
+ .stream().collect(Collectors.toMap(s -> ((Map<String, Object>) s).get("name"), s -> s));
+ if (MapUtils.isNotEmpty(nodeType) && MapUtils.isNotEmpty(propertiesFromDependency)) {
+ final Map<String, Object> propertiesFromMergingContent = (Map<String, Object>) nodeType.get("properties");
+ final Map<String, Object> updatedMap = updatePropertiesFromDependency(propertiesFromMergingContent, propertiesFromDependency);
+ nodeType.replace("properties", updatedMap);
+ nodeTypes.replace(toscaResourceName, nodeType);
+ }
+ }
+ stringObjectMap.replace("node_types", nodeTypes);
+ return yaml.dumpAsMap(stringObjectMap).getBytes();
+ }
+
+ private Map<String, Object> updatePropertiesFromDependency(final Map<String, Object> propertiesFromMergingContent,
+ final Map<String, Object> propertiesFromDependency) {
+ final Map<String, Object> result = new HashMap<>();
+ for (final Entry<String, Object> entry : propertiesFromDependency.entrySet()) {
+ final Map<String, Object> propertiesMap = new HashMap<>();
+ final String key = entry.getKey();
+ final Object value = entry.getValue();
+ if (propertiesFromMergingContent instanceof Map) {
+ final Object object = propertiesFromMergingContent.get(key);
+ if (object instanceof Map) {
+ ((Map<String, Object>) object).keySet().forEach(s ->
+ propertiesMap.put(s, ((Map<String, Object>) value).get(s))
+ );
+ } else {
+ propertiesMap.putAll(createProperties(value));
+ }
+ } else {
+ propertiesMap.putAll(createProperties(value));
+ }
+ result.put(key, propertiesMap);
+ }
+ return result;
+ }
+
+ private Map<String, Object> createProperties(final Object value) {
+ final Map<String, Object> propertiesMap = new HashMap<>();
+ propertiesMap.put("type", ((Map<String, Object>) value).get("type"));
+ propertiesMap.put("required", ((Map<String, Object>) value).get("required"));
+ final Object entrySchema = ((Map<String, Object>) value).get("entry_schema");
+ if (entrySchema != null) {
+ propertiesMap.put("entry_schema", entrySchema);
+ }
+ return propertiesMap;
+ }
+
private byte[] mergeContent(final byte[] first, final byte[] second) {
byte[] merged = new byte[0];
final Map<String, Object> firstMap = new Yaml().load(new String(first));
final Map<String, Object> secondMap = new Yaml().load(new String(second));
if (MapUtils.isNotEmpty(secondMap)) {
final DumperOptions options = new DumperOptions();
- options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
+ options.setDefaultFlowStyle(FlowStyle.BLOCK);
final Yaml yaml = new Yaml(options);
for (final Entry<String, Object> secondMapEntry : secondMap.entrySet()) {
final Map<String, Object> newMap = new HashMap<>();
}
}
+
}
private void addDependencies(final List<Map<String, Map<String, String>>> imports, final List<Triple<String, String, Component>> dependencies,
final Component fetchedComponent, final boolean isSkipImports) {
final Set<Component> componentsList = new LinkedHashSet<>();
- if (fetchedComponent instanceof Resource && !isSkipImports) {
+ if (fetchedComponent instanceof Resource) {
log.debug("fetchedComponent is a resource {}", fetchedComponent);
final Optional<Map<String, String>> derivedFromMapOfIdToName = getDerivedFromMapOfIdToName(fetchedComponent, componentsList);
if (derivedFromMapOfIdToName.isPresent() && !derivedFromMapOfIdToName.get().isEmpty()) {
}
}
});
- setImports(imports, dependencies, componentsList);
+ setImports(imports, dependencies, componentsList, isSkipImports);
} else {
- setImports(imports, dependencies, fetchedComponent);
+ setImports(imports, dependencies, fetchedComponent, isSkipImports);
}
}
}
* Creates a resource map and adds it to the import list.
*/
private void setImports(final List<Map<String, Map<String, String>>> imports, final List<Triple<String, String, Component>> dependencies,
- final Set<Component> componentsList) {
- componentsList.forEach(component -> setImports(imports, dependencies, component));
+ final Set<Component> componentsList, boolean isSkipImports) {
+ componentsList.forEach(component -> setImports(imports, dependencies, component, isSkipImports));
}
private void setImports(final List<Map<String, Map<String, String>>> imports, final List<Triple<String, String, Component>> dependencies,
- final Component component) {
+ final Component component, boolean isSkipImports) {
final Map<String, ArtifactDefinition> toscaArtifacts = component.getToscaArtifacts();
final ArtifactDefinition artifactDefinition = toscaArtifacts.get(ASSET_TOSCA_TEMPLATE);
if (artifactDefinition != null) {
keyNameBuilder.append(component.getComponentType().toString().toLowerCase());
keyNameBuilder.append("-");
keyNameBuilder.append(component.getName());
- addImports(imports, keyNameBuilder, files);
+ if (!isSkipImports) {
+ addImports(imports, keyNameBuilder, files);
+ }
dependencies.add(new ImmutableTriple<>(artifactName, artifactDefinition.getEsId(), component));
- if (!ModelConverter.isAtomicComponent(component)) {
+ if (!ModelConverter.isAtomicComponent(component) && !isSkipImports) {
final Map<String, String> interfaceFiles = new HashMap<>();
interfaceFiles.put(IMPORTS_FILE_KEY, getInterfaceFilename(artifactName));
keyNameBuilder.append("-interface");